Example 1
def clean_up_containers(container_info,job_id,is_batch_job,which_untrusted,submission_path,grading_began,use_router):
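    """Force-remove this job's docker containers, then remove the grading network(s), logging each teardown."""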
    # First, clean up the docker containers.
    for name, info in container_info.items():
        c_id = info['container_id']
        subprocess.call(['docker', 'rm', '-f', c_id])

        dockerdestroy_done=dateutils.get_current_time()
        dockerdestroy_time = (dockerdestroy_done-grading_began).total_seconds()
        grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,submission_path,"ddt:",
                                        dockerdestroy_time, "docker container {0} destroyed".format(name))

    if not use_router:
      network_name = '{0}_routerless_network'.format(which_untrusted)
      subprocess.call(['docker', 'network', 'rm', network_name])
      network_destroy_done=dateutils.get_current_time()
      network_destroy_time = (network_destroy_done-grading_began).total_seconds()
      grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,submission_path,"ddt:",
                                      network_destroy_time,"docker network {0} destroyed".format(network_name))
    else:
      #Networks must be removed AFTER all docker endpoints have been shut down.
      for name, info in container_info.items():
          if 'network' in info:
              network_name = info['network']
              subprocess.call(['docker', 'network', 'rm', network_name])
              network_destroy_done=dateutils.get_current_time()
              network_destroy_time = (network_destroy_done-grading_began).total_seconds()
              grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,submission_path,"ddt:",
                                              network_destroy_time,"docker network {0} destroyed".format(network_name))
Example 2
def clean_up_containers(container_info,job_id,is_batch_job,which_untrusted,submission_path,grading_began,use_router):
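    """Variant of the cleanup above that skips entries which never received a container_id."""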
    # First, clean up the docker containers.
    for name, info in container_info.items():
        if not "container_id" in info:
          continue
        c_id = info['container_id']
        subprocess.call(['docker', 'rm', '-f', c_id])

        dockerdestroy_done=dateutils.get_current_time()
        dockerdestroy_time = (dockerdestroy_done-grading_began).total_seconds()
        grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,submission_path,"ddt:",
                                        dockerdestroy_time, "docker container {0} destroyed".format(name))

    if not use_router:
      network_name = '{0}_routerless_network'.format(which_untrusted)
      subprocess.call(['docker', 'network', 'rm', network_name])
      network_destroy_done=dateutils.get_current_time()
      network_destroy_time = (network_destroy_done-grading_began).total_seconds()
      grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,submission_path,"ddt:",
                                      network_destroy_time,"docker network {0} destroyed".format(network_name))
    else:
      #Networks must be removed AFTER all docker endpoints have been shut down.
      for name, info in container_info.items():
          if 'network' in info:
              network_name = info['network']
              subprocess.call(['docker', 'network', 'rm', network_name])
              network_destroy_done=dateutils.get_current_time()
              network_destroy_time = (network_destroy_done-grading_began).total_seconds()
              grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,submission_path,"ddt:",
                                              network_destroy_time,"docker network {0} destroyed".format(network_name))
Example 3
    def create(self, execution_script, arguments, more_than_one):
        """ Create (but don't start) this container. """

        # Only pass container name to testcases with greater than one container. (Doing otherwise breaks compilation)
        container_name_argument = (['--container_name', self.name]
                                   if more_than_one else [])
        # A server container does not run student code, but instead hosts a service (e.g. a database.)
        if self.is_server:
            this_container = subprocess.check_output([
                'docker', 'create', '-i', '--network', 'none', '-v',
                f'{self.directory}:{self.directory}', '-w', self.directory,
                '--name', self.full_name, self.image
            ]).decode('utf8').strip()
        else:
            this_container = subprocess.check_output(
                ['docker', 'create', '-i', '--network', 'none'] +
                self.container_user_argument + [
                    '-v', self.directory + ':' + self.directory, '-w',
                    self.directory, '--hostname', self.name, '--name',
                    self.full_name, self.image, execution_script
                ] + arguments +
                container_name_argument).decode('utf8').strip()
        dockerlaunch_done = dateutils.get_current_time()
        self.log_function(f'docker container {this_container} created')
        self.container_id = this_container
Example 4
def log_message(log_path,
                job_id="UNKNOWN",
                is_batch=False,
                which_untrusted="",
                jobname="",
                timelabel="",
                elapsed_time=-1,
                message=""):
    """ Given a log directory, create or append a message to a dated log file in that directory. """

    now = dateutils.get_current_time()
    datefile = datetime.strftime(now, "%Y%m%d") + ".txt"
    autograding_log_file = os.path.join(log_path, datefile)
    easy_to_read_date = dateutils.write_submitty_date(now, True)
    batch_string = "BATCH" if is_batch else ""
    if elapsed_time == "":
        elapsed_time = -1
    elapsed_time_string = "" if elapsed_time < 0 else '{:9.3f}'.format(
        elapsed_time)
    time_unit = "" if elapsed_time < 0 else "sec"
    with open(autograding_log_file, 'a') as myfile:
        try:
            fcntl.flock(myfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
            print(
                "%s | %6s | %5s | %11s | %-75s | %-6s %9s %3s | %s" %
                (easy_to_read_date, job_id, batch_string, which_untrusted,
                 jobname, timelabel, elapsed_time_string, time_unit, message),
                file=myfile)
            fcntl.flock(myfile, fcntl.LOCK_UN)
        except OSError:
            print("Could not gain a lock on the log file.")
Example 5
def log_message(log_path,
                job_id="UNKNOWN",
                is_batch=False,
                which_untrusted="",
                jobname="",
                timelabel="",
                elapsed_time=-1,
                message=""):
    """ Given a log directory, create or append a message to a dated log file in that directory. """

    now = dateutils.get_current_time()
    datefile = datetime.strftime(now, "%Y%m%d") + ".txt"
    autograding_log_file = os.path.join(log_path, datefile)
    easy_to_read_date = dateutils.write_submitty_date(now, True)
    batch_string = "BATCH" if is_batch else ""
    if elapsed_time == "":
        elapsed_time = -1
    elapsed_time_string = "" if elapsed_time < 0 else '{:9.3f}'.format(
        elapsed_time)
    time_unit = "" if elapsed_time < 0 else "sec"
    parts = (easy_to_read_date, f"{job_id:>6s}", f"{batch_string:>5s}",
             f"{which_untrusted:>11s}", f"{jobname:75s}",
             f"{timelabel:6s} {elapsed_time_string:>9s} {time_unit:>3s}",
             message)
    write_to_log(autograding_log_file, parts)
Example 6
    def _log_filename(self) -> str:
        """Get the name of the file that should be logged into.

        Currently, this is in the format YYYYMMDD.txt.
        """
        now = dateutils.get_current_time()
        return f'{datetime.strftime(now, "%Y%m%d")}.txt'
Example 7
def log_stack_trace(job_id="UNKNOWN",
                    is_batch=False,
                    which_untrusted="",
                    jobname="",
                    timelabel="",
                    elapsed_time=-1,
                    trace=""):
    now = dateutils.get_current_time()
    datefile = "stack_traces_{0}.txt".format(datetime.strftime(now, "%Y%m%d"))
    autograding_log_file = os.path.join(AUTOGRADING_STACKTRACE_PATH, datefile)
    easy_to_read_date = dateutils.write_submitty_date(now, True)
    batch_string = "BATCH" if is_batch else ""
    if elapsed_time == "":
        elapsed_time = -1
    elapsed_time_string = "" if elapsed_time < 0 else '{:9.3f}'.format(
        elapsed_time)
    time_unit = "" if elapsed_time < 0 else "sec"
    with open(autograding_log_file, 'a') as myfile:
        try:
            fcntl.flock(myfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
            print("%s | %6s | %5s | %11s | %-75s | %-6s %9s %3s |\n%s" %
                  (easy_to_read_date, job_id, batch_string, which_untrusted,
                   jobname, timelabel, elapsed_time_string, time_unit, trace),
                  file=myfile)
            fcntl.flock(myfile, fcntl.LOCK_UN)
        except OSError:
            print("Could not gain a lock on the log file.")
Example 8
  def create(self, execution_script, arguments, more_than_one):
    """ Create (but don't start) this container. """

    client = docker.from_env()

    mount = {
        self.directory : {
          'bind' : self.directory,
          'mode' : 'rw'
        }
      }

    # Only pass container name to testcases with greater than one container. (Doing otherwise breaks compilation)
    container_name_argument = ['--container_name', self.name] if more_than_one else list()
    # A server container does not run student code, but instead hosts a service (e.g. a database.)

    try:
      if self.is_server:
        self.container = client.containers.create(self.image, stdin_open = True, tty = True, network = 'none',
                                                  volumes = mount, working_dir = self.directory, name = self.full_name)
      else:
        container_ulimits = rlimit_utils.build_ulimit_argument(self.container_rlimits, self.image)
        command = [execution_script,] + arguments + container_name_argument
        self.container = client.containers.create(self.image, command = command, ulimits = container_ulimits, stdin_open = True,
                                                  tty = True, network = 'none', user = self.container_user_argument, volumes=mount,
                                                  working_dir = self.directory, hostname = self.name, name = self.full_name)
    except docker.errors.ImageNotFound:
      self.log_function(f'ERROR: The image {self.image} is not available on this worker')
      client.close()
      raise

    dockerlaunch_done = dateutils.get_current_time()
    self.log_function(f'docker container {self.container.short_id} created')
    client.close()
Example 9
def create_container(container_name, container_image, server_container,
                     mounted_directory, job_id, is_batch_job, which_untrusted,
                     submission_path, grading_began, queue_obj,
                     submission_string, testcase_num, name):
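    """Create (but do not start) a docker container for one testcase, log the creation time, and return its id."""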

    untrusted_uid = str(getpwnam(which_untrusted).pw_uid)

    if server_container:
        this_container = subprocess.check_output([
            'docker', 'create', '-i', '--network', 'none', '-v',
            mounted_directory + ':' + mounted_directory, '-w',
            mounted_directory, '--name', container_name, container_image
        ]).decode('utf8').strip()
    else:
        this_container = subprocess.check_output([
            'docker', 'create', '-i', '-u', untrusted_uid, '--network', 'none',
            '-v', mounted_directory + ':' + mounted_directory, '-w',
            mounted_directory, '--hostname', name, '--name', container_name,
            container_image,
            os.path.join(mounted_directory, 'my_runner.out'),
            queue_obj['gradeable'], queue_obj['who'],
            str(queue_obj['version']), submission_string,
            str(testcase_num), name
        ]).decode('utf8').strip()

    dockerlaunch_done = dateutils.get_current_time()
    dockerlaunch_time = (dockerlaunch_done - grading_began).total_seconds()
    grade_items_logging.log_message(
        job_id, is_batch_job, which_untrusted, submission_path, "dcct:",
        dockerlaunch_time,
        "docker container {0} created".format(this_container))
    return this_container
Example 10
def log_container_meta(log_path, event="", name="", container="", time=0):
    """ Given a log file, create or append container meta data to a log file. """

    now = dateutils.get_current_time()
    easy_to_read_date = dateutils.write_submitty_date(now, True)
    time_unit = "sec"
    parts = (easy_to_read_date, name, container, event, f"{time:.3f}", time_unit)
    write_to_log(log_path, parts)
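A minimal usage sketch, assuming write_to_log joins the parts with ' | ' as in the other logging helpers shown here (the path and values below are hypothetical):

# Hypothetical call: record that a grading container was destroyed 2.5s in.
log_container_meta("/var/local/submitty/logs/docker/20200101.txt",
                   event="destroyed",
                   name="test01",
                   container="a1b2c3d4e5f6",
                   time=2.5)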
Example 11
def logMessage(message):
    """
    Write a message to the submitty system logs folder to aid in debugging.
    """
    now = dateutils.get_current_time()
    now_filename = datetime.strftime(now, "%Y%m%d")
    filename = f"/var/local/submitty/logs/daemon_job_queue/{now_filename}.txt"
    pid = os.getpid()
    now_format = datetime.strftime(now, "%Y-%m-%d %H:%M:%S")
    dated_message = f"{now_format} | {pid:>7} | {message}"
    with open(filename, "a") as logfile:
        logfile.write(dated_message + "\n")
        logfile.flush()
    print(dated_message, flush=True)
Example 12
def launch_container(container_name, container_image, mounted_directory,
                     job_id, is_batch_job, which_untrusted, submission_path,
                     grading_began):
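    """Launch a detached docker container with the grading directory mounted, log the launch time, and return its id."""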
    #TODO error handling.
    this_container = subprocess.check_output([
        'docker', 'run', '-t', '-d', '-v',
        mounted_directory + ':' + mounted_directory, '--name', container_name,
        container_image
    ]).decode('utf8').strip()
    dockerlaunch_done = dateutils.get_current_time()
    dockerlaunch_time = (dockerlaunch_done - grading_began).total_seconds()
    grade_items_logging.log_message(
        job_id, is_batch_job, which_untrusted, submission_path, "dcct:",
        dockerlaunch_time,
        "docker container {0} created".format(this_container))
    return this_container
Example 13
def log_message(is_batch, which_untrusted, jobname, timelabel, elapsed_time,
                message):
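    """Append a pipe-delimited autograding log line (pid, batch flag, job info, timing, message) to today's log file."""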
    now = dateutils.get_current_time()
    datefile = datetime.strftime(now, "%Y%m%d") + ".txt"
    autograding_log_file = os.path.join(AUTOGRADING_LOG_PATH, datefile)
    easy_to_read_date = dateutils.write_submitty_date(now)
    my_pid = os.getpid()
    parent_pid = os.getppid()
    batch_string = "BATCH" if is_batch else ""
    abbrev_jobname = jobname[len(SUBMITTY_DATA_DIR + "/courses/"):]
    time_unit = "" if elapsed_time == "" else "sec"
    with open(autograding_log_file, 'a') as myfile:
        fcntl.flock(myfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
        print("%s | %6s | %5s | %11s | %-75s | %-6s %5s %3s | %s" %
              (easy_to_read_date, my_pid, batch_string, which_untrusted,
               abbrev_jobname, timelabel, elapsed_time, time_unit, message),
              file=myfile)
        fcntl.flock(myfile, fcntl.LOCK_UN)
Example 14
def generateRandomSuperuserEmail(recipients):
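    """Build superuser announcement email dicts, one per recipient, with a randomly chosen subject and body."""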
    with open(os.path.join(SETUP_DATA_PATH, 'random', 'SuperuserEmailBody.txt')) as body_file, \
            open(os.path.join(SETUP_DATA_PATH, 'random', 'SuperuserSubject.txt')) as subject_file:
        body = random.choice(body_file.read().strip().split('\n'))
        subject = random.choice(subject_file.read().strip().split('\n'))
    now = dateutils.get_current_time()
    emails = []
    for recipient in recipients:
        emails.append(
            {
                "user_id": recipient.user_id,
                "subject": "[Submitty Admin Announcement]: " + subject,
                "body": body,
                "created": now,
                "email_address": recipient.user_email,
                "semester": None,
                "course": None
            }
        )
    return emails
Example 15
def log_stack_trace(job_id="UNKNOWN", is_batch=False, which_untrusted="", jobname="", timelabel="", elapsed_time=-1, trace=""):
    now = dateutils.get_current_time()
    datefile = "stack_traces_{0}.txt".format(datetime.strftime(now, "%Y%m%d"))
    autograding_log_file = os.path.join(AUTOGRADING_STACKTRACE_PATH, datefile)
    easy_to_read_date = dateutils.write_submitty_date(now, True)
    batch_string = "BATCH" if is_batch else ""
    if elapsed_time == "":
        elapsed_time = -1
    elapsed_time_string = "" if elapsed_time < 0 else '{:9.3f}'.format(elapsed_time)
    time_unit = "" if elapsed_time < 0 else "sec"
    with open(autograding_log_file, 'a') as myfile:
        try:
            fcntl.flock(myfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
            print("%s | %6s | %5s | %11s | %-75s | %-6s %9s %3s |\n%s"
                  % (easy_to_read_date, job_id, batch_string, which_untrusted,
                     jobname, timelabel, elapsed_time_string, time_unit, trace),
                  file=myfile)
            fcntl.flock(myfile, fcntl.LOCK_UN)
        except OSError:
            print("Could not gain a lock on the log file.")
Example 16
    def log_stack_trace(
        self,
        trace: str,
        *,
        is_batch: bool = False,
        which_untrusted: str = '',
        job_id: Optional[str] = None,
        jobname: str = "",
        echo_source: Optional[str] = None,
    ):
        """Log a stack trace to this logger's configured stack trace directory."""
        job_id = job_id or self.job_id
        # Save the parameters to this trace so we can duplicate these on the
        # shipper's end once the job finishes.
        #
        # TODO: Maybe we want to store time info too? Might need to think a bit
        #       more in terms of the stack traces log file format.
        if self.capture_traces:
            self.accumulated_traces.append({
                'trace': trace,
                'is_batch': is_batch,
                'which_untrusted': which_untrusted,
                'job_id': job_id,
                'jobname': jobname,
            })
        # Always run this since this could be deleted without us knowing
        os.makedirs(self.stack_trace_dir, exist_ok=True)

        now = dateutils.get_current_time()
        easy_to_read_date = dateutils.write_submitty_date(now, True)

        message = f"[{easy_to_read_date}][{job_id:>6s}]\n"
        if echo_source is not None:
            message += f"== (Echoed from {echo_source})\n"
        message += f"== Batch? {is_batch}\n"
        message += f"== Which: {which_untrusted}\n"
        message += f"==   Job: {jobname}\n"
        for line in trace.splitlines():
            message += f"== {line}\n"
        message = message.strip()
        write_to_log(self.stack_trace_path, message)
Example 17
def generateRandomCourseEmail(recipients, course):
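    """Build course email dicts, one per recipient, with a randomly chosen subject and body."""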
    with open(os.path.join(SETUP_DATA_PATH, 'random', 'CourseEmailBody.txt')) as body_file, \
            open(os.path.join(SETUP_DATA_PATH, 'random', 'CourseSubject.txt')) as subject_file:
        body = random.choice(body_file.read().strip().split('\n'))
        subject = random.choice(subject_file.read().strip().split('\n'))
    now = dateutils.get_current_time()
    emails = []
    for recipient in recipients:
        emails.append(
            {
                "user_id": recipient.user_id,
                "subject": subject,
                "body": body,
                "created": now,
                "email_address": recipient.user_email,
                "semester": course.semester,
                "course": course.course
            }
        )

    return emails
Example 18
    def log_message(
        self, message: str, *,
        is_batch: bool = False,
        which_untrusted: str = "",
        jobname: str = "",
        timelabel: str = "",
        elapsed_time: Optional[int] = None,
        job_id: Optional[str] = None
    ):
        """Log a message to this logger's configured log directory."""
        now = dateutils.get_current_time()
        easy_to_read_date = dateutils.write_submitty_date(now, True)
        batch_string = "BATCH" if is_batch else ""
        if elapsed_time is None:
            elapsed_time = -1
        elapsed_time_string = "" if elapsed_time < 0 else '{:9.3f}'.format(elapsed_time)
        time_unit = "" if elapsed_time < 0 else "sec"
        job_id = job_id or self.job_id
        parts = (easy_to_read_date, f"{job_id:>6s}", f"{batch_string:>5s}", f"{which_untrusted:>11s}",
                 f"{jobname:75s}", f"{timelabel:6s} {elapsed_time_string:>9s} {time_unit:>3s}", message)
        write_to_log(self.log_path, ' | '.join(str(x) for x in parts))
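A standalone sketch of the fixed-width format specs used above (plain Python formatting facts, not Submitty-specific):

# f"{'42':>6s}"            -> '    42'    (right-aligned, width 6)
# f"{'BATCH':>5s}"         -> 'BATCH'     (already 5 chars wide)
# '{:9.3f}'.format(0.25)   -> '    0.250' (width 9, 3 decimal places)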
Example 19
def log_message(is_batch=False,
                which_untrusted="",
                jobname="",
                timelabel="",
                elapsed_time=-1,
                message=""):
    now = dateutils.get_current_time()
    datefile = datetime.strftime(now, "%Y%m%d") + ".txt"
    autograding_log_file = os.path.join(AUTOGRADING_LOG_PATH, datefile)
    easy_to_read_date = dateutils.write_submitty_date(now, True)
    my_pid = os.getpid()
    parent_pid = os.getppid()
    batch_string = "BATCH" if is_batch else ""
    elapsed_time_string = "" if elapsed_time < 0 else '{:9.3f}'.format(
        elapsed_time)
    time_unit = "" if elapsed_time < 0 else "sec"
    with open(autograding_log_file, 'a') as myfile:
        fcntl.flock(myfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
        print("%s | %6s | %5s | %11s | %-75s | %-6s %9s %3s | %s" %
              (easy_to_read_date, my_pid, batch_string, which_untrusted,
               jobname, timelabel, elapsed_time_string, time_unit, message),
              file=myfile)
        fcntl.flock(myfile, fcntl.LOCK_UN)
Example 20
def create_container(container_name, container_image, server_container, mounted_directory,job_id,
                      is_batch_job,which_untrusted,submission_path, grading_began,queue_obj,submission_string,testcase_num,name):
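  """Create (but do not start) a docker container for one testcase, forwarding the DISPLAY variable when available."""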

  untrusted_uid = str(getpwnam(which_untrusted).pw_uid)

  display_sys_variable = os.environ.get('DISPLAY')
  display_line = [] if display_sys_variable is None else ['--display', display_sys_variable]

  if server_container:
    this_container = subprocess.check_output(['docker', 'create', '-i', '--network', 'none',
                                         '-v', mounted_directory + ':' + mounted_directory,
                                         '-w', mounted_directory,
                                         '--name', container_name,
                                         container_image
                                         ]).decode('utf8').strip()
  else:
    this_container = subprocess.check_output(['docker', 'create', '-i', '-u', untrusted_uid, '--network', 'none',
                                           '-v', mounted_directory + ':' + mounted_directory,
                                           '-w', mounted_directory,
                                           '--hostname', name,
                                           '--name', container_name,
                                           container_image,
                                           os.path.join(mounted_directory, 'my_runner.out'),
                                             queue_obj['gradeable'],
                                             queue_obj['who'],
                                             str(queue_obj['version']),
                                             submission_string,
                                             '--testcase', str(testcase_num),
                                             '--container_name', name]
                                             + display_line
                                             ).decode('utf8').strip()

  dockerlaunch_done = dateutils.get_current_time()
  dockerlaunch_time = (dockerlaunch_done-grading_began).total_seconds()
  grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,submission_path,"dcct:",dockerlaunch_time,
                                  "docker container {0} created".format(this_container))
  return this_container
Example 21
def get_job(my_name,which_machine,my_capabilities,which_untrusted,overall_lock):
    """
    Picks a job from the queue

    :param overall_lock: a lock on the directory containing all queue files
    """

    time_get_job_begin = dateutils.get_current_time()

    overall_lock.acquire()
    folder = INTERACTIVE_QUEUE

    # ----------------------------------------------------------------
    # Our first priority is to perform any awaiting VCS checkouts

    # Note: This design is imperfect:
    #
    #   * If all shippers are busy working on long-running autograding
    #     tasks there will be a delay of seconds or minutes between
    #     a student pressing the submission button and clone happening.
    #     This is a minor exploit allowing them to theoretically
    #     continue working on their submission past the deadline for
    #     the time period of the delay.
    #     -- This is not a significant, practical problem.
    #
    #   * If multiple and/or large git submissions arrive close
    #     together, this shipper job will be tied up performing these
    #     clone operations.  Because we don't release the lock, any
    #     other shippers that complete their work will also be blocked
    #     from either helping with the clones or tackling the next
    #     autograding job.
    #     -- Based on experience with actual submission patterns, we
    #        do not anticipate that this will be a significant
    #        bottleneck at this time.
    #
    #   * If a git clone takes a very long time and/or hangs because of
    #     network problems, this could halt all work on the server.
    #     -- We'll need to monitor the production server.
    #
    # We plan to do a complete overhaul of the
    # scheduler/shipper/worker and refactoring this design should be
    # part of the project.

    # Grab all the VCS files currently in the folder...
    vcs_files = [str(f) for f in Path(folder).glob('VCS__*')]
    for f in vcs_files:
        vcs_file = f[len(folder)+1:]
        no_vcs_file = f[len(folder)+1+5:]
        # do the checkout
        updated_obj = checkout_vcs_repo(folder+"/"+vcs_file)
        # save the regular grading queue file
        with open(os.path.join(folder,no_vcs_file), "w") as queue_file:
            json.dump(updated_obj, queue_file)
        # cleanup the vcs queue file
        os.remove(folder+"/"+vcs_file)
    # ----------------------------------------------------------------


    # Grab all the files currently in the folder, sorted by creation
    # time, and put them in the queue to be graded
    files = [str(f) for f in Path(folder).glob('*')]
    files_and_times = list()
    for f in files:
        try:
            my_time = os.path.getctime(f)
        except OSError:
            continue
        tup = (f, my_time)
        files_and_times.append(tup)

    files_and_times = sorted(files_and_times, key=operator.itemgetter(1))
    my_job=""

    for full_path_file, file_time in files_and_times:
        # get the file name (without the path)
        just_file = full_path_file[len(folder)+1:]

        # skip items that are already being graded
        if just_file[0:8] == "GRADING_":
            continue
        grading_file = os.path.join(folder,"GRADING_"+just_file)
        if grading_file in files:
            continue

        # skip items (very recently added!) that are already waiting for a VCS checkout
        if just_file[0:5] == "VCS__":
            continue

        # found something to do
        try:
            with open(full_path_file, 'r') as infile:
                queue_obj = json.load(infile)
        except (OSError, ValueError):
            continue

        #Check to make sure that we are capable of grading this submission
        required_capabilities = queue_obj["required_capabilities"]
        if required_capabilities not in my_capabilities:
            continue

        # prioritize interactive jobs over (batch) regrades
        # if you've found an interactive job, exit early (since they are sorted by timestamp)
        if not "regrade" in queue_obj or not queue_obj["regrade"]:
            my_job = just_file
            break

        # otherwise it's a regrade, and if we don't already have a
        # job, take it, but we have to search the rest of the list
        if my_job == "":
            my_job = just_file

    if not my_job == "":
        grading_file = os.path.join(folder, "GRADING_" + my_job)
        # create the grading file
        with open(grading_file, "w") as queue_file:
            json.dump({"untrusted": which_untrusted, "machine": which_machine}, queue_file)

    overall_lock.release()

    time_get_job_end = dateutils.get_current_time()

    time_delta = time_get_job_end-time_get_job_begin
    if time_delta > datetime.timedelta(milliseconds=100):
        print (my_name, " WARNING: submitty_autograding shipper get_job time ", time_delta)
        autograding_utils.log_message(AUTOGRADING_LOG_PATH, JOB_ID, message=str(my_name)+" WARNING: submitty_autograding shipper get_job time "+str(time_delta))

    return my_job
Example 22
def grade_from_zip(my_autograding_zip_file,my_submission_zip_file,which_untrusted):
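    """Unzip the autograding and submission payloads, then compile, run, and validate the submission, archiving the results and logs."""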

    os.chdir(SUBMITTY_DATA_DIR)
    tmp = os.path.join("/var/local/submitty/autograding_tmp/",which_untrusted,"tmp")

    if os.path.exists(tmp):
        untrusted_grant_rwx_access(which_untrusted, tmp)
        add_permissions_recursive(tmp,
                  stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                  stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                  stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)

    # Remove any and all containers left over from past runs.
    old_containers = subprocess.check_output(['docker', 'ps', '-aq', '-f', 'name={0}'.format(which_untrusted)]).split()

    for old_container in old_containers:
        subprocess.call(['docker', 'rm', '-f', old_container.decode('utf8')])

    # clean up old usage of this directory
    shutil.rmtree(tmp,ignore_errors=True)
    os.mkdir(tmp)

    which_machine = socket.gethostname()

    # unzip autograding and submission folders
    tmp_autograding = os.path.join(tmp,"TMP_AUTOGRADING")
    tmp_submission = os.path.join(tmp,"TMP_SUBMISSION")
    unzip_this_file(my_autograding_zip_file,tmp_autograding)
    unzip_this_file(my_submission_zip_file,tmp_submission)
    os.remove(my_autograding_zip_file)
    os.remove(my_submission_zip_file)

    tmp_logs = os.path.join(tmp,"TMP_SUBMISSION","tmp_logs")

    queue_file = os.path.join(tmp_submission,"queue_file.json")
    with open(queue_file, 'r') as infile:
        queue_obj = json.load(infile)

    queue_time_longstring = queue_obj["queue_time"]
    waittime = queue_obj["waittime"]
    is_batch_job = queue_obj["regrade"]
    job_id = queue_obj["job_id"]
    is_batch_job_string = "BATCH" if is_batch_job else "INTERACTIVE"
    revision = queue_obj.get("revision", None)

    partial_path = os.path.join(queue_obj["gradeable"],queue_obj["who"],str(queue_obj["version"]))
    item_name = os.path.join(queue_obj["semester"],queue_obj["course"],"submissions",partial_path)

    grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,item_name,"wait:",waittime,"")

    with open(os.path.join(tmp_submission,".grading_began"), 'r') as f:
        grading_began_longstring = f.read()
    grading_began = dateutils.read_submitty_date(grading_began_longstring)

    submission_path = os.path.join(tmp_submission, "submission")
    checkout_path = os.path.join(tmp_submission, "checkout")

    provided_code_path = os.path.join(tmp_autograding, "provided_code")
    test_input_path = os.path.join(tmp_autograding, "test_input")
    test_output_path = os.path.join(tmp_autograding, "test_output")
    custom_validation_code_path = os.path.join(tmp_autograding, "custom_validation_code")
    bin_path = os.path.join(tmp_autograding, "bin")
    form_json_config = os.path.join(tmp_autograding, "form.json")
    complete_config = os.path.join(tmp_autograding, "complete_config.json")

    with open(form_json_config, 'r') as infile:
        gradeable_config_obj = json.load(infile)
    gradeable_deadline_string = gradeable_config_obj["date_due"]

    with open(complete_config, 'r') as infile:
        complete_config_obj = json.load(infile)

    is_vcs = gradeable_config_obj["upload_type"] == "repository"
    checkout_subdirectory = complete_config_obj["autograding"].get("use_checkout_subdirectory","")
    checkout_subdir_path = os.path.join(checkout_path, checkout_subdirectory)

    if complete_config_obj.get('one_part_only', False):
        allow_only_one_part(submission_path, os.path.join(tmp_logs, "overall.txt"))
        if is_vcs:
            with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
                print("WARNING:  ONE_PART_ONLY OPTION DOES NOT MAKE SENSE WITH VCS SUBMISSION", file=f)


    # --------------------------------------------------------------------
    # START DOCKER

    # NOTE: DOCKER SUPPORT PRELIMINARY -- NEEDS MORE SECURITY BEFORE DEPLOYED ON LIVE SERVER
    complete_config = os.path.join(tmp_autograding,"complete_config.json")
    with open(complete_config, 'r') as infile:
        complete_config_obj = json.load(infile)

    # Save ourselves if autograding_method is None.
    autograding_method = complete_config_obj.get("autograding_method", "")
    USE_DOCKER = autograding_method == "docker"

    # --------------------------------------------------------------------
    # COMPILE THE SUBMITTED CODE

    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        print("====================================\nCOMPILATION STARTS", file=f)
    
    # copy submitted files to the tmp compilation directory
    tmp_compilation = os.path.join(tmp,"TMP_COMPILATION")
    os.mkdir(tmp_compilation)
    os.chdir(tmp_compilation)

    submission_path = os.path.join(tmp_submission,"submission")
    checkout_path = os.path.join(tmp_submission,"checkout")

    provided_code_path = os.path.join(tmp_autograding,"provided_code")
    test_input_path = os.path.join(tmp_autograding,"test_input")
    test_output_path = os.path.join(tmp_autograding,"test_output")
    custom_validation_code_path = os.path.join(tmp_autograding,"custom_validation_code")
    bin_path = os.path.join(tmp_autograding,"bin")
    form_json_config = os.path.join(tmp_autograding,"form.json")


    with open(form_json_config, 'r') as infile:
        gradeable_config_obj = json.load(infile)
    gradeable_deadline_string = gradeable_config_obj["date_due"]
    
    patterns_submission_to_compilation = complete_config_obj["autograding"]["submission_to_compilation"]

    add_permissions(tmp_logs,stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)

    if USE_DOCKER:
        print("!!!!!!!!!!!!!!!!!!USING DOCKER!!!!!!!!!!!!!!!!!!!!!!!!")

    with open(complete_config, 'r') as infile:
        config = json.load(infile)
        my_testcases = config['testcases']

    # grab the submission time
    with open(os.path.join(submission_path,".submit.timestamp"), 'r') as submission_time_file:
        submission_string = submission_time_file.read().rstrip()

    with open(os.path.join(tmp_logs,"compilation_log.txt"), 'w') as logfile:
        # we start counting from one.
        executable_path_list = list()
        for testcase_num in range(1, len(my_testcases)+1):
            testcase_folder = os.path.join(tmp_compilation, "test{:02}".format(testcase_num))

            if 'type' in my_testcases[testcase_num-1]:
                if my_testcases[testcase_num-1]['type'] != 'FileCheck' and my_testcases[testcase_num-1]['type'] != 'Compilation':
                    continue

                if my_testcases[testcase_num-1]['type'] == 'Compilation':
                    if 'executable_name' in my_testcases[testcase_num-1]:
                        provided_executable_list = my_testcases[testcase_num-1]['executable_name']
                        if not isinstance(provided_executable_list, (list,)):
                            provided_executable_list = list([provided_executable_list])
                        for executable_name in provided_executable_list:
                            if executable_name.strip() == '':
                                continue
                            executable_path = os.path.join(testcase_folder, executable_name)
                            executable_path_list.append((executable_path, executable_name))
            else:
                continue

            os.makedirs(testcase_folder)
            
            pattern_copy("submission_to_compilation",patterns_submission_to_compilation,submission_path,testcase_folder,tmp_logs)

            if is_vcs:
                pattern_copy("checkout_to_compilation",patterns_submission_to_compilation,checkout_subdir_path,testcase_folder,tmp_logs)

            # copy any instructor provided code files to tmp compilation directory
            copy_contents_into(job_id,provided_code_path,testcase_folder,tmp_logs)
            
            # copy compile.out to the current directory
            shutil.copy (os.path.join(bin_path,"compile.out"),os.path.join(testcase_folder,"my_compile.out"))
            add_permissions(os.path.join(testcase_folder,"my_compile.out"), stat.S_IXUSR | stat.S_IXGRP |stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)
            #untrusted_grant_rwx_access(which_untrusted, tmp_compilation)          
            untrusted_grant_rwx_access(which_untrusted, testcase_folder)
            add_permissions_recursive(testcase_folder,
                      stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                      stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                      stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)

            if USE_DOCKER:
                try:
                    #There can be only one container for a compilation step, so grab its container image
                    #TODO: set default in load_config_json.cpp
                    if my_testcases[testcase_num-1]['type'] == 'FileCheck':
                        print("performing filecheck in default ubuntu:custom container")
                        container_image = "ubuntu:custom"
                    else:
                        container_image = my_testcases[testcase_num-1]["containers"][0]["container_image"]
                        print('creating a compilation container with image {0}'.format(container_image))
                    untrusted_uid = str(getpwnam(which_untrusted).pw_uid)

                    compilation_container = None
                    compilation_container = subprocess.check_output(['docker', 'create', '-i', '-u', untrusted_uid, '--network', 'none',
                                               '-v', testcase_folder + ':' + testcase_folder,
                                               '-w', testcase_folder,
                                               container_image,
                                               #The command to be run.
                                               os.path.join(testcase_folder, 'my_compile.out'), 
                                               queue_obj['gradeable'],
                                               queue_obj['who'], 
                                               str(queue_obj['version']), 
                                               submission_string, 
                                               '--testcase', str(testcase_num)
                                               ]).decode('utf8').strip()
                    print("starting container")
                    compile_success = subprocess.call(['docker', 'start', '-i', compilation_container],
                                                   stdout=logfile,
                                                   cwd=testcase_folder)
                except Exception as e:
                    print('An error occurred when compiling with docker.')
                    grade_items_logging.log_stack_trace(job_id,is_batch_job,which_untrusted,item_name,trace=traceback.format_exc())
                finally:
                    if compilation_container is not None:
                        subprocess.call(['docker', 'rm', '-f', compilation_container])
                        print("cleaned up compilation container.")
            else:
                compile_success = subprocess.call([os.path.join(SUBMITTY_INSTALL_DIR, "sbin", "untrusted_execute"),
                                                   which_untrusted,
                                                   os.path.join(testcase_folder,"my_compile.out"),
                                                   queue_obj["gradeable"],
                                                   queue_obj["who"],
                                                   str(queue_obj["version"]),
                                                   submission_string,
                                                   '--testcase', str(testcase_num)],
                                                   stdout=logfile, 
                                                   cwd=testcase_folder)
            # remove the compilation program
            untrusted_grant_rwx_access(which_untrusted, testcase_folder)
            os.remove(os.path.join(testcase_folder,"my_compile.out"))

    if compile_success == 0:
        print (which_machine,which_untrusted,"COMPILATION OK")
    else:
        print (which_machine,which_untrusted,"COMPILATION FAILURE")
        grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,item_name,message="COMPILATION FAILURE")
    add_permissions_recursive(tmp_compilation,
                      stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                      stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                      stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)


    # return to the main tmp directory
    os.chdir(tmp)


    # --------------------------------------------------------------------
    # make the runner directory

    with open(os.path.join(tmp_logs,"overall.txt"),'a') as f:
        print ("====================================\nRUNNER STARTS", file=f)
        
    tmp_work = os.path.join(tmp,"TMP_WORK")
    tmp_work_test_input = os.path.join(tmp_work, "test_input")
    tmp_work_submission = os.path.join(tmp_work, "submitted_files")
    tmp_work_compiled = os.path.join(tmp_work, "compiled_files")
    tmp_work_checkout = os.path.join(tmp_work, "checkout")
    
    os.mkdir(tmp_work)

    os.mkdir(tmp_work_test_input)
    os.mkdir(tmp_work_submission)
    os.mkdir(tmp_work_compiled)
    os.mkdir(tmp_work_checkout)

    os.chdir(tmp_work)

    # move all executable files from the compilation directory to the main tmp directory
    # Note: Must preserve the directory structure of compiled files (esp for Java)

    patterns_submission_to_runner = complete_config_obj["autograding"]["submission_to_runner"]

    pattern_copy("submission_to_runner",patterns_submission_to_runner,submission_path,tmp_work_submission,tmp_logs)
    if is_vcs:
        pattern_copy("checkout_to_runner",patterns_submission_to_runner,checkout_subdir_path,tmp_work_checkout,tmp_logs)

    # move the compiled files into the tmp_work_compiled directory
    for path, name in executable_path_list:
        if not os.path.isfile(path): 
            continue
        target_path = os.path.join(tmp_work_compiled, name)
        if not os.path.exists(target_path):
            os.makedirs(os.path.dirname(target_path), exist_ok=True)
        shutil.copy(path, target_path)
        print('copied over {0}'.format(target_path))
        with open(os.path.join(tmp_logs,"overall.txt"),'a') as f:
            print('grade_item: copied over {0}'.format(target_path), file=f)

    patterns_compilation_to_runner = complete_config_obj["autograding"]["compilation_to_runner"]
    #copy into the actual tmp_work directory for archiving/validating
    pattern_copy("compilation_to_runner",patterns_compilation_to_runner,tmp_compilation,tmp_work,tmp_logs)
    #copy into tmp_work_compiled, which is provided to each testcase
    # TODO change this as our methodology for declaring testcase dependencies becomes more robust
    pattern_copy("compilation_to_runner",patterns_compilation_to_runner,tmp_compilation,tmp_work_compiled,tmp_logs)

    # copy input files to tmp_work directory
    copy_contents_into(job_id,test_input_path,tmp_work_test_input,tmp_logs)

    subprocess.call(['ls', '-lR', '.'], stdout=open(tmp_logs + "/overall.txt", 'a'))

    # copy runner.out to the current directory
    shutil.copy (os.path.join(bin_path,"run.out"),os.path.join(tmp_work,"my_runner.out"))

    #set the appropriate permissions for the newly created directories 
    #TODO replaces commented out code below

    add_permissions(os.path.join(tmp_work,"my_runner.out"), stat.S_IXUSR | stat.S_IXGRP |stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)
    add_permissions(tmp_work_submission, stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)
    add_permissions(tmp_work_compiled, stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)
    add_permissions(tmp_work_checkout, stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)

    #TODO this is how permissions used to be set. It was removed because of the way it interacts with the sticky bit.
    ## give the untrusted user read/write/execute permissions on the tmp directory & files
    # os.system('ls -al {0}'.format(tmp_work))
    # add_permissions_recursive(tmp_work,
    #                           stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
    #                           stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
    #                           stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)

    ##################################################################################################
    #call grade_item_main_runner.py
    runner_success = grade_item_main_runner.executeTestcases(complete_config_obj, tmp_logs, tmp_work, queue_obj, submission_string, 
                                                                                    item_name, USE_DOCKER, None, which_untrusted,
                                                                                    job_id, grading_began)
    ##################################################################################################

    if runner_success == 0:
        print (which_machine,which_untrusted, "RUNNER OK")
    else:
        print (which_machine,which_untrusted, "RUNNER FAILURE")
        grade_items_logging.log_message(job_id, is_batch_job, which_untrusted, item_name, message="RUNNER FAILURE")

    add_permissions_recursive(tmp_work,
                          stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                          stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                          stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH) 
    add_permissions_recursive(tmp_compilation,
                          stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                          stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                          stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH) 

    # --------------------------------------------------------------------
    # RUN VALIDATOR
    with open(os.path.join(tmp_logs,"overall.txt"),'a') as f:
        print ("====================================\nVALIDATION STARTS", file=f)

    # copy results files from compilation...
    patterns_submission_to_validation = complete_config_obj["autograding"]["submission_to_validation"]
    pattern_copy("submission_to_validation",patterns_submission_to_validation,submission_path,tmp_work,tmp_logs)
    if is_vcs:
        pattern_copy("checkout_to_validation",patterns_submission_to_validation,checkout_subdir_path,tmp_work,tmp_logs)
    patterns_compilation_to_validation = complete_config_obj["autograding"]["compilation_to_validation"]
    pattern_copy("compilation_to_validation",patterns_compilation_to_validation,tmp_compilation,tmp_work,tmp_logs)

    # remove the compilation directory
    shutil.rmtree(tmp_compilation)

    # copy output files to tmp_work directory
    copy_contents_into(job_id,test_output_path,tmp_work,tmp_logs)

    # copy any instructor custom validation code into the tmp work directory
    copy_contents_into(job_id,custom_validation_code_path,tmp_work,tmp_logs)

    subprocess.call(['ls', '-lR', '.'], stdout=open(tmp_logs + "/overall.txt", 'a'))

    # copy validator.out to the current directory
    shutil.copy (os.path.join(bin_path,"validate.out"),os.path.join(tmp_work,"my_validator.out"))

    # give the untrusted user read/write/execute permissions on the tmp directory & files
    add_permissions_recursive(tmp_work,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)

    add_permissions(os.path.join(tmp_work,"my_validator.out"), stat.S_IXUSR | stat.S_IXGRP |stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)

    # TODO: remove these debugging prints.
    print("VALIDATING")
    # run the validator (my_validator.out) as the untrusted user
    with open(os.path.join(tmp_logs,"validator_log.txt"), 'w') as logfile:
        if USE_DOCKER:
            # WIP: This option file facilitated testing...
            #USE_DOCKER = os.path.isfile("/tmp/use_docker")
            #use_docker_string="grading begins, using DOCKER" if USE_DOCKER else "grading begins (not using docker)"
            #grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,submission_path,message=use_docker_string)
            container = subprocess.check_output(['docker', 'run', '-t', '-d',
                                                 '-v', tmp + ':' + tmp,
                                                 'ubuntu:custom']).decode('utf8').strip()
            dockerlaunch_done=dateutils.get_current_time()
            dockerlaunch_time = (dockerlaunch_done-grading_began).total_seconds()
            grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,item_name,"dcct:",dockerlaunch_time,"docker container created")

            validator_success = subprocess.call(['docker', 'exec', '-w', tmp_work, container,
                                                 os.path.join(tmp_work, 'my_validator.out'), queue_obj['gradeable'],
                                                 queue_obj['who'], str(queue_obj['version']), submission_string], stdout=logfile)
        else:
            validator_success = subprocess.call([os.path.join(SUBMITTY_INSTALL_DIR,"sbin","untrusted_execute"),
                                                 which_untrusted,
                                                 os.path.join(tmp_work,"my_validator.out"),
                                                 queue_obj["gradeable"],
                                                 queue_obj["who"],
                                                 str(queue_obj["version"]),
                                                 submission_string],
                                                stdout=logfile)

    if validator_success == 0:
        print (which_machine,which_untrusted,"VALIDATOR OK")
    else:
        print (which_machine,which_untrusted,"VALIDATOR FAILURE")
        grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,item_name,message="VALIDATION FAILURE")

    untrusted_grant_rwx_access(which_untrusted,tmp_work)

    # grab the result of autograding
    grade_result = ""
    try:
        with open(os.path.join(tmp_work,"grade.txt")) as f:
            lines = f.readlines()
            for line in lines:
                line = line.rstrip('\n')
                if line.startswith("Automatic grading total:"):
                    grade_result = line
    except OSError:
        with open(os.path.join(tmp_logs,"overall.txt"),'a') as f:
            print ("\n\nERROR: Grading incomplete -- Could not open ",os.path.join(tmp_work,"grade.txt"), file=f)
            grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,item_name,message="ERROR: grade.txt does not exist")
            grade_items_logging.log_stack_trace(job_id,is_batch_job,which_untrusted,item_name,trace=traceback.format_exc())

    # --------------------------------------------------------------------
    # MAKE RESULTS DIRECTORY & COPY ALL THE FILES THERE
    tmp_results = os.path.join(tmp,"TMP_RESULTS")

    with open(os.path.join(tmp_logs,"overall.txt"),'a') as f:
        print ("====================================\nARCHIVING STARTS", file=f)

    subprocess.call(['ls', '-lR', '.'], stdout=open(tmp_logs + "/overall.txt", 'a'))

    os.makedirs(os.path.join(tmp_results,"details"))

    # remove the test_input directory, so we don't archive it!
    shutil.rmtree(os.path.join(tmp_work,"test_input"))

    # loop over the test case directories, and remove any files that are also in the test_input folder
    for testcase_num in range(1, len(my_testcases)+1):
        testcase_folder = os.path.join(tmp_work, "test{:02}".format(testcase_num))
        remove_test_input_files(os.path.join(tmp_logs,"overall.txt"),test_input_path,testcase_folder)

    patterns_work_to_details = complete_config_obj["autograding"]["work_to_details"]
    pattern_copy("work_to_details",patterns_work_to_details,tmp_work,os.path.join(tmp_results,"details"),tmp_logs)

    if ("work_to_public" in complete_config_obj["autograding"] and
        len(complete_config_obj["autograding"]["work_to_public"]) > 0):
        # create the directory
        os.makedirs(os.path.join(tmp_results,"results_public"))
        # copy the files
        patterns_work_to_public = complete_config_obj["autograding"]["work_to_public"]
        pattern_copy("work_to_public",patterns_work_to_public,tmp_work,os.path.join(tmp_results,"results_public"),tmp_logs)

    history_file_tmp = os.path.join(tmp_submission,"history.json")
    history_file = os.path.join(tmp_results,"history.json")
    if os.path.isfile(history_file_tmp):
        shutil.move(history_file_tmp,history_file)
        # fix permissions
        ta_group_id = os.stat(tmp_results).st_gid
        os.chown(history_file,int(DAEMON_UID),ta_group_id)
        add_permissions(history_file,stat.S_IRGRP)
    grading_finished = dateutils.get_current_time()

    try:
        shutil.copy(os.path.join(tmp_work,"grade.txt"),tmp_results)
    except OSError:
        with open(os.path.join(tmp_logs,"overall.txt"),'a') as f:
            print ("\n\nERROR: Grading incomplete -- Could not copy ",os.path.join(tmp_work,"grade.txt"), file=f)
        grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,item_name,message="ERROR: grade.txt does not exist")
        grade_items_logging.log_stack_trace(job_id,is_batch_job,which_untrusted,item_name,trace=traceback.format_exc())

    # -------------------------------------------------------------
    # create/append to the results history

    # grab the submission time
    with open (os.path.join(submission_path,".submit.timestamp")) as submission_time_file:
        submission_string = submission_time_file.read().rstrip()
    submission_datetime = dateutils.read_submitty_date(submission_string)

    gradeable_deadline_datetime = dateutils.read_submitty_date(gradeable_deadline_string)
    gradeable_deadline_longstring = dateutils.write_submitty_date(gradeable_deadline_datetime)
    submission_longstring = dateutils.write_submitty_date(submission_datetime)
    
    seconds_late = int((submission_datetime-gradeable_deadline_datetime).total_seconds())
    # note: negative = not late

    grading_finished_longstring = dateutils.write_submitty_date(grading_finished)

    gradingtime = (grading_finished-grading_began).total_seconds()

    with open(os.path.join(tmp_submission,"queue_file.json"), 'r') as infile:
        queue_obj = json.load(infile)
    queue_obj["gradingtime"]=gradingtime
    queue_obj["grade_result"]=grade_result
    queue_obj["which_untrusted"]=which_untrusted

    with open(os.path.join(tmp_results,"queue_file.json"),'w') as outfile:
        json.dump(queue_obj,outfile,sort_keys=True,indent=4,separators=(',', ': '))

    try:
        shutil.move(os.path.join(tmp_work, "results.json"), os.path.join(tmp_results, "results.json"))
    except Exception:
        with open(os.path.join(tmp_logs,"overall.txt"),'a') as f:
            print("\n\nERROR: Grading incomplete -- Could not open/write ", os.path.join(tmp_work,"results.json"), file=f)
            grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,item_name,message="ERROR: results.json read/write error")
            grade_items_logging.log_stack_trace(job_id,is_batch_job,which_untrusted,item_name,trace=traceback.format_exc())

    write_grade_history.just_write_grade_history(history_file,
                                                 gradeable_deadline_longstring,
                                                 submission_longstring,
                                                 seconds_late,
                                                 queue_time_longstring,
                                                 is_batch_job_string,
                                                 grading_began_longstring,
                                                 int(waittime),
                                                 grading_finished_longstring,
                                                 int(gradingtime),
                                                 grade_result,
                                                 revision)

    os.chdir(SUBMITTY_DATA_DIR)

    if USE_DOCKER:
        with open(os.path.join(tmp_logs,"overall_log.txt"), 'w') as logfile:
            chmod_success = subprocess.call(['docker', 'exec', '-w', tmp_work, container,
                                             'chmod', '-R', 'ugo+rwx', '.'], stdout=logfile)

    with open(os.path.join(tmp_logs,"overall.txt"),'a') as f:
        f.write("FINISHED GRADING!\n")

    # save the logs!
    shutil.copytree(tmp_logs,os.path.join(tmp_results,"logs"))

    # zip up results folder
    filehandle, my_results_zip_file=tempfile.mkstemp()
    zip_my_directory(tmp_results,my_results_zip_file)
    os.close(filehandle)
    shutil.rmtree(tmp_autograding)
    shutil.rmtree(tmp_submission)
    shutil.rmtree(tmp_results)
    shutil.rmtree(tmp_work)
    shutil.rmtree(tmp)

    # WIP: extra logging for testing
    #grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,submission_path,message="done grading")

    # --------------------------------------------------------------------
    # CLEAN UP DOCKER
    if USE_DOCKER:
        subprocess.call(['docker', 'rm', '-f', container])
        dockerdestroy_done=dateutils.get_current_time()
        dockerdestroy_time = (dockerdestroy_done-grading_finished).total_seconds()
        grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,item_name,"ddt:",dockerdestroy_time,"docker container destroyed")
        
    grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,item_name,"grade:",gradingtime,grade_result)

    return my_results_zip_file
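
The archiving step above leans on a small pattern worth isolating: tempfile.mkstemp returns both an open OS-level file descriptor and a path, the zip is written to the path, and the descriptor is closed explicitly so it does not leak. A minimal self-contained sketch of that idea follows; the zipfile-based body is an assumption for illustration, since the real zip_my_directory helper lives elsewhere in Submitty and its exact archive layout may differ:

import os
import tempfile
import zipfile

def zip_directory_to_tempfile(src_dir):
    # mkstemp hands back (fd, path); the caller owns the fd and must close it.
    fd, zip_path = tempfile.mkstemp()
    os.close(fd)
    with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zf:
        for root, _dirs, files in os.walk(src_dir):
            for name in files:
                full = os.path.join(root, name)
                # store entries relative to src_dir (an assumed convention)
                zf.write(full, os.path.relpath(full, src_dir))
    return zip_path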
Example no. 23
def prepare_autograding_and_submission_zip(which_machine, which_untrusted,
                                           next_directory, next_to_grade):
    os.chdir(SUBMITTY_DATA_DIR)

    # generate a random id to be used to track this job in the autograding logs
    job_id = ''.join(
        random.choice(string.ascii_letters + string.digits) for _ in range(6))

    # --------------------------------------------------------
    # figure out what we're supposed to grade & error checking
    obj = load_queue_file_obj(job_id, next_directory, next_to_grade)

    partial_path = os.path.join(obj["gradeable"], obj["who"],
                                str(obj["version"]))
    item_name = os.path.join(obj["semester"], obj["course"], "submissions",
                             partial_path)
    submission_path = os.path.join(SUBMITTY_DATA_DIR, "courses", item_name)
    if not os.path.isdir(submission_path):
        grade_items_logging.log_message(
            job_id,
            message="ERROR: the submission directory does not exist " +
            submission_path)
        raise RuntimeError("ERROR: the submission directory does not exist",
                           submission_path)
    print(which_machine, which_untrusted, "prepare zip", submission_path)
    is_vcs, vcs_type, vcs_base_url, vcs_subdirectory = get_vcs_info(
        SUBMITTY_DATA_DIR, obj["semester"], obj["course"], obj["gradeable"],
        obj["who"], obj["team"])

    is_batch_job = "regrade" in obj and obj["regrade"]
    is_batch_job_string = "BATCH" if is_batch_job else "INTERACTIVE"

    queue_time = get_queue_time(next_directory, next_to_grade)
    queue_time_longstring = dateutils.write_submitty_date(queue_time)
    grading_began = dateutils.get_current_time()
    waittime = (grading_began - queue_time).total_seconds()
    grade_items_logging.log_message(job_id, is_batch_job, "zip", item_name,
                                    "wait:", waittime, "")

    # --------------------------------------------------------
    # various paths
    provided_code_path = os.path.join(SUBMITTY_DATA_DIR, "courses",
                                      obj["semester"], obj["course"],
                                      "provided_code", obj["gradeable"])
    test_input_path = os.path.join(SUBMITTY_DATA_DIR, "courses",
                                   obj["semester"], obj["course"],
                                   "test_input", obj["gradeable"])
    test_output_path = os.path.join(SUBMITTY_DATA_DIR, "courses",
                                    obj["semester"], obj["course"],
                                    "test_output", obj["gradeable"])
    custom_validation_code_path = os.path.join(SUBMITTY_DATA_DIR, "courses",
                                               obj["semester"], obj["course"],
                                               "custom_validation_code",
                                               obj["gradeable"])
    bin_path = os.path.join(SUBMITTY_DATA_DIR, "courses", obj["semester"],
                            obj["course"], "bin", obj["gradeable"])
    form_json_config = os.path.join(SUBMITTY_DATA_DIR, "courses",
                                    obj["semester"], obj["course"], "config",
                                    "form",
                                    "form_" + obj["gradeable"] + ".json")
    complete_config = os.path.join(
        SUBMITTY_DATA_DIR, "courses", obj["semester"], obj["course"], "config",
        "complete_config", "complete_config_" + obj["gradeable"] + ".json")

    if not os.path.exists(form_json_config):
        grade_items_logging.log_message(
            job_id,
            message="ERROR: the form json file does not exist " +
            form_json_config)
        raise RuntimeError("ERROR: the form json file does not exist ",
                           form_json_config)
    if not os.path.exists(complete_config):
        grade_items_logging.log_message(
            job_id,
            message="ERROR: the complete config file does not exist " +
            complete_config)
        raise RuntimeError("ERROR: the complete config file does not exist ",
                           complete_config)

    # --------------------------------------------------------------------
    # MAKE TEMPORARY DIRECTORY & COPY THE NECESSARY FILES THERE
    tmp = tempfile.mkdtemp()
    tmp_autograding = os.path.join(tmp, "TMP_AUTOGRADING")
    os.mkdir(tmp_autograding)
    tmp_submission = os.path.join(tmp, "TMP_SUBMISSION")
    os.mkdir(tmp_submission)

    copytree_if_exists(provided_code_path,
                       os.path.join(tmp_autograding, "provided_code"))
    copytree_if_exists(test_input_path,
                       os.path.join(tmp_autograding, "test_input"))
    copytree_if_exists(test_output_path,
                       os.path.join(tmp_autograding, "test_output"))
    copytree_if_exists(custom_validation_code_path,
                       os.path.join(tmp_autograding, "custom_validation_code"))
    copytree_if_exists(bin_path, os.path.join(tmp_autograding, "bin"))
    shutil.copy(form_json_config, os.path.join(tmp_autograding, "form.json"))
    shutil.copy(complete_config,
                os.path.join(tmp_autograding, "complete_config.json"))

    checkout_path = os.path.join(SUBMITTY_DATA_DIR, "courses", obj["semester"],
                                 obj["course"], "checkout", partial_path)
    results_path = os.path.join(SUBMITTY_DATA_DIR, "courses", obj["semester"],
                                obj["course"], "results", partial_path)

    # grab a copy of the current history.json file (if it exists)
    history_file = os.path.join(results_path, "history.json")
    history_file_tmp = ""
    if os.path.isfile(history_file):
        shutil.copy(history_file, os.path.join(tmp_submission, "history.json"))
    # get info from the gradeable config file
    with open(complete_config, 'r') as infile:
        complete_config_obj = json.load(infile)

    checkout_subdirectory = complete_config_obj["autograding"].get(
        "use_checkout_subdirectory", "")
    checkout_subdir_path = os.path.join(checkout_path, checkout_subdirectory)
    queue_file = os.path.join(next_directory, next_to_grade)

    # switch to tmp directory
    os.chdir(tmp)

    # make the logs directory
    tmp_logs = os.path.join(tmp, "TMP_SUBMISSION", "tmp_logs")
    os.makedirs(tmp_logs)
    # 'touch' a file in the logs folder
    open(os.path.join(tmp_logs, "overall.txt"), 'a').close()

    # grab the submission time
    with open(os.path.join(submission_path,
                           ".submit.timestamp")) as submission_time_file:
        submission_string = submission_time_file.read().rstrip()
    submission_datetime = dateutils.read_submitty_date(submission_string)

    # --------------------------------------------------------------------
    # CHECKOUT THE STUDENT'S REPO
    if is_vcs:

        # cleanup the previous checkout (if it exists)
        shutil.rmtree(checkout_path, ignore_errors=True)
        os.makedirs(checkout_path, exist_ok=True)

        try:
            # If we are public or private github, we will have an empty vcs_subdirectory
            if vcs_subdirectory == '':
                with open(os.path.join(
                        submission_path,
                        ".submit.VCS_CHECKOUT")) as submission_vcs_file:
                    VCS_JSON = json.load(submission_vcs_file)
                    git_user_id = VCS_JSON["git_user_id"]
                    git_repo_id = VCS_JSON["git_repo_id"]
                    if not valid_github_user_id(git_user_id):
                        raise Exception(
                            "Invalid GitHub user/organization name: '" +
                            git_user_id + "'")
                    if not valid_github_repo_id(git_repo_id):
                        raise Exception("Invalid GitHub repository name: '" +
                                        git_repo_id + "'")
                    # construct path for GitHub
                    vcs_path = "https://www.github.com/" + git_user_id + "/" + git_repo_id

            # is vcs_subdirectory standalone or should it be combined with base_url?
            elif vcs_subdirectory[0] == '/' or '://' in vcs_subdirectory:
                vcs_path = vcs_subdirectory
            else:
                if '://' in vcs_base_url:
                    vcs_path = urllib.parse.urljoin(vcs_base_url,
                                                    vcs_subdirectory)
                else:
                    vcs_path = os.path.join(vcs_base_url, vcs_subdirectory)

            with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
                print("====================================\nVCS CHECKOUT",
                      file=f)
                print('vcs_base_url', vcs_base_url, file=f)
                print('vcs_subdirectory', vcs_subdirectory, file=f)
                print('vcs_path', vcs_path, file=f)
                print(['/usr/bin/git', 'clone', vcs_path, checkout_path],
                      file=f)

            # git clone may fail -- because repository does not exist,
            # or because we don't have appropriate access credentials
            try:
                subprocess.check_call(
                    ['/usr/bin/git', 'clone', vcs_path, checkout_path])
                os.chdir(checkout_path)

                # determine which version we need to checkout
                # if the repo is empty or the master branch does not exist, this command will fail
                try:
                    what_version = subprocess.check_output([
                        'git', 'rev-list', '-n', '1',
                        '--before="' + submission_string + '"', 'master'
                    ])
                    what_version = str(what_version.decode('utf-8')).rstrip()
                    if what_version == "":
                        # oops, pressed the grade button before a valid commit
                        shutil.rmtree(checkout_path, ignore_errors=True)
                    else:
                        # and check out the right version
                        subprocess.call(
                            ['git', 'checkout', '-b', 'grade', what_version])
                    os.chdir(tmp)
                    with open(os.path.join(tmp_logs, "overall.txt"),
                              'a') as f:
                        subprocess.call(['ls', '-lR', checkout_path],
                                        stdout=f)
                    obj['revision'] = what_version

                # exception on git rev-list
                except subprocess.CalledProcessError as error:
                    grade_items_logging.log_message(
                        job_id,
                        message=
                        "ERROR: failed to determine version on master branch "
                        + str(error))
                    os.chdir(checkout_path)
                    with open(
                            os.path.join(
                                checkout_path,
                                "failed_to_determine_version_on_master_branch.txt"
                            ), 'w') as f:
                        print(str(error), file=f)
                        print("\n", file=f)
                        print(
                            "Check to be sure the repository is not empty.\n",
                            file=f)
                        print(
                            "Check to be sure the repository has a master branch.\n",
                            file=f)
                        print(
                            "And check to be sure the timestamps on the master branch are reasonable.\n",
                            file=f)

            # exception on git clone
            except subprocess.CalledProcessError as error:
                grade_items_logging.log_message(
                    job_id,
                    message="ERROR: failed to clone repository " + str(error))
                os.chdir(checkout_path)
                with open(
                        os.path.join(checkout_path,
                                     "failed_to_clone_repository.txt"),
                        'w') as f:
                    print(str(error), file=f)
                    print("\n", file=f)
                    print("Check to be sure the repository exists.\n", file=f)
                    print(
                        "And check to be sure the submitty_daemon user has appropriate access credentials.\n",
                        file=f)

        # exception in constructing full git repository url/path
        except Exception as error:
            grade_items_logging.log_message(
                job_id,
                message="ERROR: failed to construct valid repository url/path "
                + str(error))
            os.chdir(checkout_path)
            with open(
                    os.path.join(
                        checkout_path,
                        "failed_to_construct_valid_repository_url.txt"),
                    'w') as f:
                print(str(error), file=f)
                print("\n", file=f)
                print("Check to be sure the repository exists.\n", file=f)
                print(
                    "And check to be sure the submitty_daemon user has appropriate access credentials.\n",
                    file=f)

    copytree_if_exists(submission_path,
                       os.path.join(tmp_submission, "submission"))
    copytree_if_exists(checkout_path, os.path.join(tmp_submission, "checkout"))
    obj["queue_time"] = queue_time_longstring
    obj["regrade"] = is_batch_job
    obj["waittime"] = waittime
    obj["job_id"] = job_id

    with open(os.path.join(tmp_submission, "queue_file.json"), 'w') as outfile:
        json.dump(obj,
                  outfile,
                  sort_keys=True,
                  indent=4,
                  separators=(',', ': '))

    grading_began_longstring = dateutils.write_submitty_date(grading_began)
    with open(os.path.join(tmp_submission, ".grading_began"), 'w') as f:
        print(grading_began_longstring, file=f)

    # zip up autograding & submission folders
    filehandle1, my_autograding_zip_file = tempfile.mkstemp()
    filehandle2, my_submission_zip_file = tempfile.mkstemp()
    grade_item.zip_my_directory(tmp_autograding, my_autograding_zip_file)
    grade_item.zip_my_directory(tmp_submission, my_submission_zip_file)
    os.close(filehandle1)
    os.close(filehandle2)
    # cleanup
    shutil.rmtree(tmp_autograding)
    shutil.rmtree(tmp_submission)
    shutil.rmtree(tmp)

    #grade_items_logging.log_message(job_id,is_batch_job,"done zip",item_name)

    return (my_autograding_zip_file, my_submission_zip_file)
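
The VCS checkout above pins grading to the last commit on master made no later than the submission timestamp, via git rev-list -n 1 --before=<time>. A standalone sketch of that technique (repository path and timestamp format are placeholders; unlike the example above, the --before value is passed without embedded quotes, which subprocess does not need):

import subprocess

def commit_at_time(repo_dir, timestamp_string, branch='master'):
    # Newest commit on `branch` no later than the timestamp; returns ''
    # when no commit predates the timestamp, and raises CalledProcessError
    # if the branch is missing (mirroring the error handling above).
    out = subprocess.check_output(
        ['git', 'rev-list', '-n', '1',
         '--before=' + timestamp_string, branch],
        cwd=repo_dir)
    return out.decode('utf-8').strip()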
Example no. 24
def grade_from_zip(my_autograding_zip_file, my_submission_zip_file,
                   which_untrusted):

    os.chdir(SUBMITTY_DATA_DIR)
    tmp = os.path.join("/var/local/submitty/autograding_tmp/", which_untrusted,
                       "tmp")

    if os.path.exists(tmp):
        untrusted_grant_rwx_access(which_untrusted, tmp)
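        # open the stale tree up (recursive rwx for user, group, and other)
        # so the rmtree below can delete every leftover file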
        add_permissions_recursive(
            tmp, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP
            | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH
            | stat.S_IXOTH, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR
            | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH
            | stat.S_IWOTH | stat.S_IXOTH, stat.S_IRUSR | stat.S_IWUSR
            | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP
            | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)

    # Remove any and all containers left over from past runs.
    old_containers = subprocess.check_output(
        ['docker', 'ps', '-aq', '-f',
         'name={0}'.format(which_untrusted)]).split()

    for old_container in old_containers:
        subprocess.call(['docker', 'rm', '-f', old_container.decode('utf8')])

    # clean up old usage of this directory
    shutil.rmtree(tmp, ignore_errors=True)
    os.mkdir(tmp)

    which_machine = socket.gethostname()

    # unzip autograding and submission folders
    tmp_autograding = os.path.join(tmp, "TMP_AUTOGRADING")
    tmp_submission = os.path.join(tmp, "TMP_SUBMISSION")
    unzip_this_file(my_autograding_zip_file, tmp_autograding)
    unzip_this_file(my_submission_zip_file, tmp_submission)
    os.remove(my_autograding_zip_file)
    os.remove(my_submission_zip_file)

    tmp_logs = os.path.join(tmp, "TMP_SUBMISSION", "tmp_logs")

    queue_file = os.path.join(tmp_submission, "queue_file.json")
    with open(queue_file, 'r') as infile:
        queue_obj = json.load(infile)

    queue_time_longstring = queue_obj["queue_time"]
    waittime = queue_obj["waittime"]
    is_batch_job = queue_obj["regrade"]
    job_id = queue_obj["job_id"]
    is_batch_job_string = "BATCH" if is_batch_job else "INTERACTIVE"
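    # 'revision' is only present when the zip-preparation step checked out a VCS submission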
    revision = queue_obj.get("revision", None)

    partial_path = os.path.join(queue_obj["gradeable"], queue_obj["who"],
                                str(queue_obj["version"]))
    item_name = os.path.join(queue_obj["semester"], queue_obj["course"],
                             "submissions", partial_path)

    grade_items_logging.log_message(job_id, is_batch_job, which_untrusted,
                                    item_name, "wait:", waittime, "")

    with open(os.path.join(tmp_submission, ".grading_began"), 'r') as f:
        grading_began_longstring = f.read()
    grading_began = dateutils.read_submitty_date(grading_began_longstring)

    submission_path = os.path.join(tmp_submission, "submission")
    checkout_path = os.path.join(tmp_submission, "checkout")

    provided_code_path = os.path.join(tmp_autograding, "provided_code")
    test_input_path = os.path.join(tmp_autograding, "test_input")
    test_output_path = os.path.join(tmp_autograding, "test_output")
    custom_validation_code_path = os.path.join(tmp_autograding,
                                               "custom_validation_code")
    bin_path = os.path.join(tmp_autograding, "bin")
    form_json_config = os.path.join(tmp_autograding, "form.json")
    complete_config = os.path.join(tmp_autograding, "complete_config.json")

    with open(form_json_config, 'r') as infile:
        gradeable_config_obj = json.load(infile)
    gradeable_deadline_string = gradeable_config_obj["date_due"]

    with open(complete_config, 'r') as infile:
        complete_config_obj = json.load(infile)

    is_vcs = gradeable_config_obj["upload_type"] == "repository"
    checkout_subdirectory = complete_config_obj["autograding"].get(
        "use_checkout_subdirectory", "")
    checkout_subdir_path = os.path.join(checkout_path, checkout_subdirectory)

    if complete_config_obj.get('one_part_only', False):
        allow_only_one_part(submission_path,
                            os.path.join(tmp_logs, "overall.txt"))
        if is_vcs:
            with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
                print(
                    "WARNING:  ONE_PART_ONLY OPTION DOES NOT MAKE SENSE WITH VCS SUBMISSION",
                    file=f)

    # --------------------------------------------------------------------
    # START DOCKER

    # NOTE: DOCKER SUPPORT PRELIMINARY -- NEEDS MORE SECURITY BEFORE DEPLOYED ON LIVE SERVER

    # Save ourselves if autograding_method is None.
    autograding_method = complete_config_obj.get("autograding_method", "")
    USE_DOCKER = (autograding_method == "docker")

    # --------------------------------------------------------------------
    # COMPILE THE SUBMITTED CODE

    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        print("====================================\nCOMPILATION STARTS",
              file=f)

    # copy submitted files to the tmp compilation directory
    tmp_compilation = os.path.join(tmp, "TMP_COMPILATION")
    os.mkdir(tmp_compilation)
    os.chdir(tmp_compilation)

    patterns_submission_to_compilation = complete_config_obj["autograding"][
        "submission_to_compilation"]

    add_permissions(tmp_logs, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)

    if USE_DOCKER:
        print("!!!!!!!!!!!!!!!!!!USING DOCKER!!!!!!!!!!!!!!!!!!!!!!!!")

    my_testcases = complete_config_obj['testcases']

    # grab the submission time
    with open(os.path.join(submission_path, ".submit.timestamp"),
              'r') as submission_time_file:
        submission_string = submission_time_file.read().rstrip()

    with open(os.path.join(tmp_logs, "compilation_log.txt"), 'w') as logfile:
        # we start counting from one.
        executable_path_list = list()
        # default, in case no testcase requires a compilation step
        compile_success = 0
        for testcase_num in range(1, len(my_testcases) + 1):
            testcase_folder = os.path.join(tmp_compilation,
                                           "test{:02}".format(testcase_num))

            testcase = my_testcases[testcase_num - 1]
            if 'type' not in testcase:
                continue
            if testcase['type'] not in ('FileCheck', 'Compilation'):
                continue

            if (testcase['type'] == 'Compilation'
                    and 'executable_name' in testcase):
                provided_executable_list = testcase['executable_name']
                if not isinstance(provided_executable_list, list):
                    provided_executable_list = [provided_executable_list]
                for executable_name in provided_executable_list:
                    if executable_name.strip() == '':
                        continue
                    executable_path = os.path.join(testcase_folder,
                                                   executable_name)
                    executable_path_list.append(
                        (executable_path, executable_name))

            os.makedirs(testcase_folder)

            pattern_copy("submission_to_compilation",
                         patterns_submission_to_compilation, submission_path,
                         testcase_folder, tmp_logs)

            if is_vcs:
                pattern_copy("checkout_to_compilation",
                             patterns_submission_to_compilation,
                             checkout_subdir_path, testcase_folder, tmp_logs)

            # copy any instructor provided code files to tmp compilation directory
            copy_contents_into(job_id, provided_code_path, testcase_folder,
                               tmp_logs)

            # copy compile.out to the current directory
            shutil.copy(os.path.join(bin_path, "compile.out"),
                        os.path.join(testcase_folder, "my_compile.out"))
            add_permissions(
                os.path.join(testcase_folder, "my_compile.out"), stat.S_IXUSR
                | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)
            #untrusted_grant_rwx_access(which_untrusted, tmp_compilation)
            untrusted_grant_rwx_access(which_untrusted, testcase_folder)
            add_permissions_recursive(
                testcase_folder, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR
                | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH
                | stat.S_IWOTH | stat.S_IXOTH, stat.S_IRUSR | stat.S_IWUSR
                | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP
                | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH, stat.S_IRUSR
                | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP
                | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)

            if USE_DOCKER:
                compilation_container = None
                try:
                    #There can be only one container for a compilation step, so grab its container image
                    #TODO: set default in load_config_json.cpp
                    if testcase['type'] == 'FileCheck':
                        print(
                            "performing filecheck in default ubuntu:custom container"
                        )
                        container_image = "ubuntu:custom"
                    else:
                        container_image = testcase["containers"][0][
                            "container_image"]
                        print(
                            'creating a compilation container with image {0}'.
                            format(container_image))
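                    # resolve the untrusted user's numeric uid so the
                    # container below runs without root privileges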
                    untrusted_uid = str(getpwnam(which_untrusted).pw_uid)

                    compilation_container = subprocess.check_output([
                        'docker',
                        'create',
                        '-i',
                        '-u',
                        untrusted_uid,
                        '--network',
                        'none',
                        '-v',
                        testcase_folder + ':' + testcase_folder,
                        '-w',
                        testcase_folder,
                        container_image,
                        #The command to be run.
                        os.path.join(testcase_folder, 'my_compile.out'),
                        queue_obj['gradeable'],
                        queue_obj['who'],
                        str(queue_obj['version']),
                        submission_string,
                        '--testcase',
                        str(testcase_num)
                    ]).decode('utf8').strip()
                    print("starting container")
                    compile_success = subprocess.call(
                        ['docker', 'start', '-i', compilation_container],
                        stdout=logfile,
                        cwd=testcase_folder)
                except Exception as e:
                    print('An error occurred when compiling with docker.')
                    grade_items_logging.log_stack_trace(
                        job_id,
                        is_batch_job,
                        which_untrusted,
                        item_name,
                        trace=traceback.format_exc())
                finally:
                    if compilation_container is not None:
                        subprocess.call(
                            ['docker', 'rm', '-f', compilation_container])
                        print("cleaned up compilation container.")
            else:
                compile_success = subprocess.call([
                    os.path.join(SUBMITTY_INSTALL_DIR, "sbin",
                                 "untrusted_execute"), which_untrusted,
                    os.path.join(testcase_folder, "my_compile.out"),
                    queue_obj["gradeable"], queue_obj["who"],
                    str(queue_obj["version"]), submission_string, '--testcase',
                    str(testcase_num)
                ],
                                                  stdout=logfile,
                                                  cwd=testcase_folder)
            # remove the compilation program
            untrusted_grant_rwx_access(which_untrusted, testcase_folder)
            os.remove(os.path.join(testcase_folder, "my_compile.out"))

    if compile_success == 0:
        print(which_machine, which_untrusted, "COMPILATION OK")
    else:
        print(which_machine, which_untrusted, "COMPILATION FAILURE")
        grade_items_logging.log_message(job_id,
                                        is_batch_job,
                                        which_untrusted,
                                        item_name,
                                        message="COMPILATION FAILURE")
    add_permissions_recursive(
        tmp_compilation, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR
        | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH
        | stat.S_IWOTH | stat.S_IXOTH, stat.S_IRUSR | stat.S_IWUSR
        | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP
        | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH, stat.S_IRUSR
        | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP
        | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)

    # return to the main tmp directory
    os.chdir(tmp)

    # --------------------------------------------------------------------
    # make the runner directory

    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        print("====================================\nRUNNER STARTS", file=f)

    tmp_work = os.path.join(tmp, "TMP_WORK")
    tmp_work_test_input = os.path.join(tmp_work, "test_input")
    tmp_work_submission = os.path.join(tmp_work, "submitted_files")
    tmp_work_compiled = os.path.join(tmp_work, "compiled_files")
    tmp_work_checkout = os.path.join(tmp_work, "checkout")

    os.mkdir(tmp_work)

    os.mkdir(tmp_work_test_input)
    os.mkdir(tmp_work_submission)
    os.mkdir(tmp_work_compiled)
    os.mkdir(tmp_work_checkout)

    os.chdir(tmp_work)

    # move all executable files from the compilation directory to the main tmp directory
    # Note: Must preserve the directory structure of compiled files (esp for Java)

    patterns_submission_to_runner = complete_config_obj["autograding"][
        "submission_to_runner"]

    pattern_copy("submission_to_runner", patterns_submission_to_runner,
                 submission_path, tmp_work_submission, tmp_logs)
    if is_vcs:
        pattern_copy("checkout_to_runner", patterns_submission_to_runner,
                     checkout_subdir_path, tmp_work_checkout, tmp_logs)

    # move the compiled files into the tmp_work_compiled directory
    for path, name in executable_path_list:
        if not os.path.isfile(path):
            continue
        target_path = os.path.join(tmp_work_compiled, name)
        if not os.path.exists(target_path):
            os.makedirs(os.path.dirname(target_path), exist_ok=True)
        shutil.copy(path, target_path)
        print('copied over {0}'.format(target_path))
        with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
            print('grade_item: copied over {0}'.format(target_path), file=f)

    patterns_compilation_to_runner = complete_config_obj["autograding"][
        "compilation_to_runner"]
    #copy into the actual tmp_work directory for archiving/validating
    pattern_copy("compilation_to_runner", patterns_compilation_to_runner,
                 tmp_compilation, tmp_work, tmp_logs)
    #copy into tmp_work_compiled, which is provided to each testcase
    # TODO change this as our methodology for declaring testcase dependencies becomes more robust
    pattern_copy("compilation_to_runner", patterns_compilation_to_runner,
                 tmp_compilation, tmp_work_compiled, tmp_logs)

    # copy input files to tmp_work directory
    copy_contents_into(job_id, test_input_path, tmp_work_test_input, tmp_logs)

    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        subprocess.call(['ls', '-lR', '.'], stdout=f)

    # copy runner.out to the current directory
    shutil.copy(os.path.join(bin_path, "run.out"),
                os.path.join(tmp_work, "my_runner.out"))

    # set the appropriate permissions for the newly created directories
    # TODO: this replaces the commented-out permission code below

    add_permissions(
        os.path.join(tmp_work, "my_runner.out"), stat.S_IXUSR | stat.S_IXGRP
        | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)
    add_permissions(tmp_work_submission,
                    stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)
    add_permissions(tmp_work_compiled,
                    stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)
    add_permissions(tmp_work_checkout,
                    stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)

    #TODO this is how permissions used to be set. It was removed because of the way it interacts with the sticky bit.
    ## give the untrusted user read/write/execute permissions on the tmp directory & files
    # os.system('ls -al {0}'.format(tmp_work))
    # add_permissions_recursive(tmp_work,
    #                           stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
    #                           stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
    #                           stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)

    ##################################################################################################
    #call grade_item_main_runner.py
    runner_success = grade_item_main_runner.executeTestcases(
        complete_config_obj, tmp_logs, tmp_work, queue_obj, submission_string,
        item_name, USE_DOCKER, None, which_untrusted, job_id, grading_began)
    ##################################################################################################

    if runner_success == 0:
        print(which_machine, which_untrusted, "RUNNER OK")
    else:
        print(which_machine, which_untrusted, "RUNNER FAILURE")
        grade_items_logging.log_message(job_id,
                                        is_batch_job,
                                        which_untrusted,
                                        item_name,
                                        message="RUNNER FAILURE")

    add_permissions_recursive(
        tmp_work, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP
        | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH
        | stat.S_IXOTH, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR
        | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH
        | stat.S_IWOTH | stat.S_IXOTH, stat.S_IRUSR | stat.S_IWUSR
        | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP
        | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)
    add_permissions_recursive(
        tmp_compilation, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR
        | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP | stat.S_IROTH
        | stat.S_IWOTH | stat.S_IXOTH, stat.S_IRUSR | stat.S_IWUSR
        | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP
        | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH, stat.S_IRUSR
        | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IWGRP
        | stat.S_IXGRP | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)

    # --------------------------------------------------------------------
    # RUN VALIDATOR
    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        print("====================================\nVALIDATION STARTS",
              file=f)

    # copy results files from compilation...
    patterns_submission_to_validation = complete_config_obj["autograding"][
        "submission_to_validation"]
    pattern_copy("submission_to_validation", patterns_submission_to_validation,
                 submission_path, tmp_work, tmp_logs)
    if is_vcs:
        pattern_copy("checkout_to_validation",
                     patterns_submission_to_validation, checkout_subdir_path,
                     tmp_work, tmp_logs)
    patterns_compilation_to_validation = complete_config_obj["autograding"][
        "compilation_to_validation"]
    pattern_copy("compilation_to_validation",
                 patterns_compilation_to_validation, tmp_compilation, tmp_work,
                 tmp_logs)

    # remove the compilation directory
    shutil.rmtree(tmp_compilation)

    # copy output files to tmp_work directory
    copy_contents_into(job_id, test_output_path, tmp_work, tmp_logs)

    # copy any instructor custom validation code into the tmp work directory
    copy_contents_into(job_id, custom_validation_code_path, tmp_work, tmp_logs)

    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        subprocess.call(['ls', '-lR', '.'], stdout=f)

    # copy validator.out to the current directory
    shutil.copy(os.path.join(bin_path, "validate.out"),
                os.path.join(tmp_work, "my_validator.out"))

    # give the untrusted user read/write/execute permissions on the tmp directory & files
    add_permissions_recursive(tmp_work,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)

    add_permissions(
        os.path.join(tmp_work, "my_validator.out"), stat.S_IXUSR | stat.S_IXGRP
        | stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)

    # TODO: remove debugging prints.
    print("VALIDATING")
    # run my_validator.out as the untrusted user
    with open(os.path.join(tmp_logs, "validator_log.txt"), 'w') as logfile:
        if USE_DOCKER:
            # WIP: This option file facilitated testing...
            #USE_DOCKER = os.path.isfile("/tmp/use_docker")
            #use_docker_string="grading begins, using DOCKER" if USE_DOCKER else "grading begins (not using docker)"
            #grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,submission_path,message=use_docker_string)
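            # launch a detached container with the whole tmp tree bind-mounted;
            # the same container is reused for the post-grading chmod and
            # removed in the CLEAN UP DOCKER step at the end of this function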
            container = subprocess.check_output([
                'docker', 'run', '-t', '-d', '-v', tmp + ':' + tmp,
                'ubuntu:custom'
            ]).decode('utf8').strip()
            dockerlaunch_done = dateutils.get_current_time()
            dockerlaunch_time = (dockerlaunch_done -
                                 grading_began).total_seconds()
            grade_items_logging.log_message(job_id, is_batch_job,
                                            which_untrusted, item_name,
                                            "dcct:", dockerlaunch_time,
                                            "docker container created")

            validator_success = subprocess.call([
                'docker', 'exec', '-w', tmp_work, container,
                os.path.join(tmp_work, 'my_validator.out'),
                queue_obj['gradeable'], queue_obj['who'],
                str(queue_obj['version']), submission_string
            ],
                                                stdout=logfile)
        else:
            validator_success = subprocess.call([
                os.path.join(SUBMITTY_INSTALL_DIR, "sbin",
                             "untrusted_execute"), which_untrusted,
                os.path.join(tmp_work, "my_validator.out"),
                queue_obj["gradeable"], queue_obj["who"],
                str(queue_obj["version"]), submission_string
            ],
                                                stdout=logfile)

    if validator_success == 0:
        print(which_machine, which_untrusted, "VALIDATOR OK")
    else:
        print(which_machine, which_untrusted, "VALIDATOR FAILURE")
        grade_items_logging.log_message(job_id,
                                        is_batch_job,
                                        which_untrusted,
                                        item_name,
                                        message="VALIDATION FAILURE")

    untrusted_grant_rwx_access(which_untrusted, tmp_work)

    # grab the result of autograding
    grade_result = ""
    try:
        with open(os.path.join(tmp_work, "grade.txt")) as f:
            lines = f.readlines()
            for line in lines:
                line = line.rstrip('\n')
                if line.startswith("Automatic grading total:"):
                    grade_result = line
    except Exception:
        with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
            print("\n\nERROR: Grading incomplete -- Could not open ",
                  os.path.join(tmp_work, "grade.txt"),
                  file=f)
            grade_items_logging.log_message(
                job_id,
                is_batch_job,
                which_untrusted,
                item_name,
                message="ERROR: grade.txt does not exist")
            grade_items_logging.log_stack_trace(job_id,
                                                is_batch_job,
                                                which_untrusted,
                                                item_name,
                                                trace=traceback.format_exc())

    # --------------------------------------------------------------------
    # MAKE RESULTS DIRECTORY & COPY ALL THE FILES THERE
    tmp_results = os.path.join(tmp, "TMP_RESULTS")

    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        print("====================================\nARCHIVING STARTS", file=f)

    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        subprocess.call(['ls', '-lR', '.'], stdout=f)

    os.makedirs(os.path.join(tmp_results, "details"))

    # remove the test_input directory, so we don't archive it!
    shutil.rmtree(os.path.join(tmp_work, "test_input"))

    # loop over the test case directories, and remove any files that are also in the test_input folder
    for testcase_num in range(1, len(my_testcases) + 1):
        testcase_folder = os.path.join(tmp_work,
                                       "test{:02}".format(testcase_num))
        remove_test_input_files(os.path.join(tmp_logs, "overall.txt"),
                                test_input_path, testcase_folder)

    patterns_work_to_details = complete_config_obj["autograding"][
        "work_to_details"]
    pattern_copy("work_to_details", patterns_work_to_details, tmp_work,
                 os.path.join(tmp_results, "details"), tmp_logs)

    if ("work_to_public" in complete_config_obj["autograding"]
            and len(complete_config_obj["autograding"]["work_to_public"]) > 0):
        # create the directory
        os.makedirs(os.path.join(tmp_results, "results_public"))
        # copy the files
        patterns_work_to_public = complete_config_obj["autograding"][
            "work_to_public"]
        pattern_copy("work_to_public", patterns_work_to_public, tmp_work,
                     os.path.join(tmp_results, "results_public"), tmp_logs)

    history_file_tmp = os.path.join(tmp_submission, "history.json")
    history_file = os.path.join(tmp_results, "history.json")
    if os.path.isfile(history_file_tmp):
        shutil.move(history_file_tmp, history_file)
        # fix permissions
        ta_group_id = os.stat(tmp_results).st_gid
        os.chown(history_file, int(DAEMON_UID), ta_group_id)
        add_permissions(history_file, stat.S_IRGRP)
    grading_finished = dateutils.get_current_time()

    try:
        shutil.copy(os.path.join(tmp_work, "grade.txt"), tmp_results)
    except Exception:
        with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
            print("\n\nERROR: Grading incomplete -- Could not copy ",
                  os.path.join(tmp_work, "grade.txt"),
                  file=f)
        grade_items_logging.log_message(
            job_id,
            is_batch_job,
            which_untrusted,
            item_name,
            message="ERROR: grade.txt does not exist")
        grade_items_logging.log_stack_trace(job_id,
                                            is_batch_job,
                                            which_untrusted,
                                            item_name,
                                            trace=traceback.format_exc())

    # -------------------------------------------------------------
    # create/append to the results history

    # grab the submission time
    with open(os.path.join(submission_path,
                           ".submit.timestamp")) as submission_time_file:
        submission_string = submission_time_file.read().rstrip()
    submission_datetime = dateutils.read_submitty_date(submission_string)

    gradeable_deadline_datetime = dateutils.read_submitty_date(
        gradeable_deadline_string)
    gradeable_deadline_longstring = dateutils.write_submitty_date(
        gradeable_deadline_datetime)
    submission_longstring = dateutils.write_submitty_date(submission_datetime)

    seconds_late = int(
        (submission_datetime - gradeable_deadline_datetime).total_seconds())
    # note: negative = not late

    grading_finished_longstring = dateutils.write_submitty_date(
        grading_finished)

    gradingtime = (grading_finished - grading_began).total_seconds()

    with open(os.path.join(tmp_submission, "queue_file.json"), 'r') as infile:
        queue_obj = json.load(infile)
    queue_obj["gradingtime"] = gradingtime
    queue_obj["grade_result"] = grade_result
    queue_obj["which_untrusted"] = which_untrusted

    with open(os.path.join(tmp_results, "queue_file.json"), 'w') as outfile:
        json.dump(queue_obj,
                  outfile,
                  sort_keys=True,
                  indent=4,
                  separators=(',', ': '))

    try:
        shutil.move(os.path.join(tmp_work, "results.json"),
                    os.path.join(tmp_results, "results.json"))
    except Exception:
        with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
            print("\n\nERROR: Grading incomplete -- Could not open/write ",
                  os.path.join(tmp_work, "results.json"),
                  file=f)
            grade_items_logging.log_message(
                job_id,
                is_batch_job,
                which_untrusted,
                item_name,
                message="ERROR: results.json read/write error")
            grade_items_logging.log_stack_trace(job_id,
                                                is_batch_job,
                                                which_untrusted,
                                                item_name,
                                                trace=traceback.format_exc())

    write_grade_history.just_write_grade_history(
        history_file, gradeable_deadline_longstring, submission_longstring,
        seconds_late, queue_time_longstring, is_batch_job_string,
        grading_began_longstring, int(waittime), grading_finished_longstring,
        int(gradingtime), grade_result, revision)

    os.chdir(SUBMITTY_DATA_DIR)

    if USE_DOCKER:
        with open(os.path.join(tmp_logs, "overall_log.txt"), 'w') as logfile:
            chmod_success = subprocess.call([
                'docker', 'exec', '-w', tmp_work, container, 'chmod', '-R',
                'ugo+rwx', '.'
            ],
                                            stdout=logfile)

    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        f.write("FINISHED GRADING!\n")

    # save the logs!
    shutil.copytree(tmp_logs, os.path.join(tmp_results, "logs"))

    # zip up results folder
    filehandle, my_results_zip_file = tempfile.mkstemp()
    zip_my_directory(tmp_results, my_results_zip_file)
    os.close(filehandle)
    shutil.rmtree(tmp_autograding)
    shutil.rmtree(tmp_submission)
    shutil.rmtree(tmp_results)
    shutil.rmtree(tmp_work)
    shutil.rmtree(tmp)

    # WIP: extra logging for testing
    #grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,submission_path,message="done grading")

    # --------------------------------------------------------------------
    # CLEAN UP DOCKER
    if USE_DOCKER:
        subprocess.call(['docker', 'rm', '-f', container])
        dockerdestroy_done = dateutils.get_current_time()
        dockerdestroy_time = (dockerdestroy_done -
                              grading_finished).total_seconds()
        grade_items_logging.log_message(job_id, is_batch_job, which_untrusted,
                                        item_name, "ddt:", dockerdestroy_time,
                                        "docker container destroyed")

    grade_items_logging.log_message(job_id, is_batch_job, which_untrusted,
                                    item_name, "grade:", gradingtime,
                                    grade_result)

    return my_results_zip_file
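
The compilation step in the example above drives Docker through a create / start -i / rm -f lifecycle so the untrusted code runs with no network, as a non-root uid, and is always cleaned up. A condensed, hedged sketch of that pattern (the helper name and its exact argument set are illustrative, but every docker flag appears in the example above):

import subprocess

def run_in_throwaway_container(image, workdir, command, uid, logfile):
    # create the container first so its id is known even if start fails
    container = subprocess.check_output(
        ['docker', 'create', '-i', '-u', uid, '--network', 'none',
         '-v', workdir + ':' + workdir, '-w', workdir, image] + command
    ).decode('utf8').strip()
    try:
        # start -i attaches and blocks until the command exits
        return subprocess.call(['docker', 'start', '-i', container],
                               stdout=logfile)
    finally:
        # always remove the container, even on failure
        subprocess.call(['docker', 'rm', '-f', container])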
Example no. 25
def prepare_autograding_and_submission_zip(next_directory, next_to_grade):
    os.chdir(SUBMITTY_DATA_DIR)
    # --------------------------------------------------------
    # figure out what we're supposed to grade & error checking
    obj = load_queue_file_obj(next_directory, next_to_grade)

    partial_path = os.path.join(obj["gradeable"], obj["who"],
                                str(obj["version"]))
    item_name = os.path.join(obj["semester"], obj["course"], "submissions",
                             partial_path)
    submission_path = os.path.join(SUBMITTY_DATA_DIR, "courses", item_name)
    if not os.path.isdir(submission_path):
        grade_items_logging.log_message(
            message="ERROR: the submission directory does not exist" +
            submission_path)
        raise RuntimeError("ERROR: the submission directory does not exist",
                           submission_path)
    print("pid", os.getpid(), "GRADE THIS", submission_path)
    is_vcs, vcs_type, vcs_base_url, vcs_subdirectory = get_vcs_info(
        SUBMITTY_DATA_DIR, obj["semester"], obj["course"], obj["gradeable"],
        obj["who"], obj["team"])

    is_batch_job = next_directory == BATCH_QUEUE
    is_batch_job_string = "BATCH" if is_batch_job else "INTERACTIVE"

    queue_time = get_queue_time(next_directory, next_to_grade)
    queue_time_longstring = dateutils.write_submitty_date(queue_time)
    grading_began = dateutils.get_current_time()
    waittime = (grading_began - queue_time).total_seconds()
    grade_items_logging.log_message(is_batch_job, "zip", item_name, "wait:",
                                    waittime, "")

    # --------------------------------------------------------------------
    # MAKE TEMPORARY DIRECTORY & COPY THE NECESSARY FILES THERE

    tmp = tempfile.mkdtemp()
    tmp_autograding = os.path.join(tmp, "TMP_AUTOGRADING")
    os.mkdir(tmp_autograding)
    tmp_submission = os.path.join(tmp, "TMP_SUBMISSION")
    os.mkdir(tmp_submission)

    # --------------------------------------------------------
    # various paths
    provided_code_path = os.path.join(SUBMITTY_DATA_DIR, "courses",
                                      obj["semester"], obj["course"],
                                      "provided_code", obj["gradeable"])
    test_input_path = os.path.join(SUBMITTY_DATA_DIR, "courses",
                                   obj["semester"], obj["course"],
                                   "test_input", obj["gradeable"])
    test_output_path = os.path.join(SUBMITTY_DATA_DIR, "courses",
                                    obj["semester"], obj["course"],
                                    "test_output", obj["gradeable"])
    custom_validation_code_path = os.path.join(SUBMITTY_DATA_DIR, "courses",
                                               obj["semester"], obj["course"],
                                               "custom_validation_code",
                                               obj["gradeable"])
    bin_path = os.path.join(SUBMITTY_DATA_DIR, "courses", obj["semester"],
                            obj["course"], "bin", obj["gradeable"])
    form_json_config = os.path.join(SUBMITTY_DATA_DIR, "courses",
                                    obj["semester"], obj["course"], "config",
                                    "form",
                                    "form_" + obj["gradeable"] + ".json")
    complete_config = os.path.join(
        SUBMITTY_DATA_DIR, "courses", obj["semester"], obj["course"], "config",
        "complete_config", "complete_config_" + obj["gradeable"] + ".json")

    copytree_if_exists(provided_code_path,
                       os.path.join(tmp_autograding, "provided_code"))
    copytree_if_exists(test_input_path,
                       os.path.join(tmp_autograding, "test_input"))
    copytree_if_exists(test_output_path,
                       os.path.join(tmp_autograding, "test_output"))
    copytree_if_exists(custom_validation_code_path,
                       os.path.join(tmp_autograding, "custom_validation_code"))
    copytree_if_exists(bin_path, os.path.join(tmp_autograding, "bin"))
    shutil.copy(form_json_config, os.path.join(tmp_autograding, "form.json"))
    shutil.copy(complete_config,
                os.path.join(tmp_autograding, "complete_config.json"))

    checkout_path = os.path.join(SUBMITTY_DATA_DIR, "courses", obj["semester"],
                                 obj["course"], "checkout", partial_path)
    results_path = os.path.join(SUBMITTY_DATA_DIR, "courses", obj["semester"],
                                obj["course"], "results", partial_path)

    # grab a copy of the current history.json file (if it exists)
    history_file = os.path.join(results_path, "history.json")
    history_file_tmp = ""
    if os.path.isfile(history_file):
        filehandle, history_file_tmp = tempfile.mkstemp()
        os.close(filehandle)  # mkstemp returns an open descriptor; close it to avoid a leak
        shutil.copy(history_file, history_file_tmp)
        shutil.copy(history_file, os.path.join(tmp_submission, "history.json"))

    # get info from the gradeable config file
    with open(complete_config, 'r') as infile:
        complete_config_obj = json.load(infile)

    checkout_subdirectory = complete_config_obj["autograding"].get(
        "use_checkout_subdirectory", "")
    checkout_subdir_path = os.path.join(checkout_path, checkout_subdirectory)
    queue_file = os.path.join(next_directory, next_to_grade)

    # switch to tmp directory
    os.chdir(tmp)

    # make the logs directory
    tmp_logs = os.path.join(tmp, "TMP_SUBMISSION", "tmp_logs")
    os.makedirs(tmp_logs)
    # 'touch' a file in the logs folder
    open(os.path.join(tmp_logs, "overall.txt"), 'a').close()

    # grab the submission time
    with open(os.path.join(submission_path,
                           ".submit.timestamp")) as submission_time_file:
        submission_string = submission_time_file.read().rstrip()

    submission_datetime = dateutils.read_submitty_date(submission_string)

    # --------------------------------------------------------------------
    # CHECKOUT THE STUDENT's REPO
    if is_vcs:
        # is vcs_subdirectory standalone or should it be combined with base_url?
        if vcs_subdirectory[0] == '/' or '://' in vcs_subdirectory:
            vcs_path = vcs_subdirectory
        else:
            if '://' in vcs_base_url:
                vcs_path = urllib.parse.urljoin(vcs_base_url, vcs_subdirectory)
            else:
                vcs_path = os.path.join(vcs_base_url, vcs_subdirectory)

        with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
            print("====================================\nVCS CHECKOUT", file=f)
            print('vcs_base_url', vcs_base_url, file=f)
            print('vcs_subdirectory', vcs_subdirectory, file=f)
            print('vcs_path', vcs_path, file=f)
            print(['/usr/bin/git', 'clone', vcs_path, checkout_path], file=f)

        # cleanup the previous checkout (if it exists)
        shutil.rmtree(checkout_path, ignore_errors=True)
        os.makedirs(checkout_path, exist_ok=True)
        subprocess.call(['/usr/bin/git', 'clone', vcs_path, checkout_path])
        os.chdir(checkout_path)

        # determine which version we need to checkout
        what_version = subprocess.check_output([
            'git', 'rev-list', '-n', '1',
            '--before="' + submission_string + '"', 'master'
        ])
        what_version = str(what_version.decode('utf-8')).rstrip()
        if what_version == "":
            # oops, pressed the grade button before a valid commit
            shutil.rmtree(checkout_path, ignore_errors=True)
        else:
            # and check out the right version
            subprocess.call(['git', 'checkout', '-b', 'grade', what_version])
        os.chdir(tmp)
        subprocess.call(['ls', '-lR', checkout_path],
                        stdout=open(tmp_logs + "/overall.txt", 'a'))

    copytree_if_exists(submission_path,
                       os.path.join(tmp_submission, "submission"))
    copytree_if_exists(checkout_path, os.path.join(tmp_submission, "checkout"))
    obj["queue_time"] = queue_time_longstring
    obj["is_batch_job"] = is_batch_job
    obj["waittime"] = waittime

    with open(os.path.join(tmp_submission, "queue_file.json"), 'w') as outfile:
        json.dump(obj,
                  outfile,
                  sort_keys=True,
                  indent=4,
                  separators=(',', ': '))

    grading_began_longstring = dateutils.write_submitty_date(grading_began)
    with open(os.path.join(tmp_submission, ".grading_began"), 'w') as f:
        print(grading_began_longstring, file=f)

    # zip up autograding & submission folders
    filehandle1, my_autograding_zip_file = tempfile.mkstemp()
    filehandle2, my_submission_zip_file = tempfile.mkstemp()
    zip_my_directory(tmp_autograding, my_autograding_zip_file)
    zip_my_directory(tmp_submission, my_submission_zip_file)
    # close the descriptors left open by mkstemp (as the later version of
    # this function does) so we don't leak two per graded item
    os.close(filehandle1)
    os.close(filehandle2)
    # cleanup
    shutil.rmtree(tmp_autograding)
    shutil.rmtree(tmp_submission)
    shutil.rmtree(tmp)

    return (my_autograding_zip_file, my_submission_zip_file)
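A note on a helper used above: copytree_if_exists is referenced throughout these examples but defined elsewhere. A minimal sketch, assuming it simply skips sources that are absent for a given gradeable, could be:

import os
import shutil

def copytree_if_exists(source, target_dir):
    # Copy the whole source tree into target_dir, silently skipping
    # sources that do not exist (e.g. a gradeable with no provided_code).
    if not os.path.isdir(source):
        return
    shutil.copytree(source, target_dir)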
Example no. 26
def prepare_autograding_and_submission_zip(which_machine, which_untrusted,
                                           next_directory, next_to_grade):
    os.chdir(SUBMITTY_DATA_DIR)

    # generate a random id to be used to track this job in the autograding logs
    job_id = ''.join(
        random.choice(string.ascii_letters + string.digits) for _ in range(6))

    # --------------------------------------------------------
    # figure out what we're supposed to grade & error checking
    obj = load_queue_file_obj(job_id, next_directory, next_to_grade)

    partial_path = os.path.join(obj["gradeable"], obj["who"],
                                str(obj["version"]))
    item_name = os.path.join(obj["semester"], obj["course"], "submissions",
                             partial_path)
    submission_path = os.path.join(SUBMITTY_DATA_DIR, "courses", item_name)
    if not os.path.isdir(submission_path):
        grade_items_logging.log_message(
            job_id,
            message="ERROR: the submission directory does not exist " +
            submission_path)
        raise RuntimeError("ERROR: the submission directory does not exist",
                           submission_path)
    print(which_machine, which_untrusted, "prepare zip", submission_path)
    is_vcs, vcs_type, vcs_base_url, vcs_subdirectory = get_vcs_info(
        SUBMITTY_DATA_DIR, obj["semester"], obj["course"], obj["gradeable"],
        obj["who"], obj["team"])

    is_batch_job = "regrade" in obj and obj["regrade"]
    is_batch_job_string = "BATCH" if is_batch_job else "INTERACTIVE"

    queue_time = get_queue_time(next_directory, next_to_grade)
    queue_time_longstring = dateutils.write_submitty_date(queue_time)
    grading_began = dateutils.get_current_time()
    waittime = (grading_began - queue_time).total_seconds()
    grade_items_logging.log_message(job_id, is_batch_job, "zip", item_name,
                                    "wait:", waittime, "")

    # --------------------------------------------------------
    # various paths
    provided_code_path = os.path.join(SUBMITTY_DATA_DIR, "courses",
                                      obj["semester"], obj["course"],
                                      "provided_code", obj["gradeable"])
    instructor_solution_path = os.path.join(SUBMITTY_DATA_DIR, "courses",
                                            obj["semester"], obj["course"],
                                            "instructor_solution",
                                            obj["gradeable"])
    test_input_path = os.path.join(SUBMITTY_DATA_DIR, "courses",
                                   obj["semester"], obj["course"],
                                   "test_input", obj["gradeable"])
    test_output_path = os.path.join(SUBMITTY_DATA_DIR, "courses",
                                    obj["semester"], obj["course"],
                                    "test_output", obj["gradeable"])
    custom_validation_code_path = os.path.join(SUBMITTY_DATA_DIR, "courses",
                                               obj["semester"], obj["course"],
                                               "custom_validation_code",
                                               obj["gradeable"])
    bin_path = os.path.join(SUBMITTY_DATA_DIR, "courses", obj["semester"],
                            obj["course"], "bin", obj["gradeable"])
    form_json_config = os.path.join(SUBMITTY_DATA_DIR, "courses",
                                    obj["semester"], obj["course"], "config",
                                    "form",
                                    "form_" + obj["gradeable"] + ".json")
    complete_config = os.path.join(
        SUBMITTY_DATA_DIR, "courses", obj["semester"], obj["course"], "config",
        "complete_config", "complete_config_" + obj["gradeable"] + ".json")

    if not os.path.exists(form_json_config):
        grade_items_logging.log_message(
            job_id,
            message="ERROR: the form json file does not exist " +
            form_json_config)
        raise RuntimeError("ERROR: the form json file does not exist ",
                           form_json_config)
    if not os.path.exists(complete_config):
        grade_items_logging.log_message(
            job_id,
            message="ERROR: the complete config file does not exist " +
            complete_config)
        raise RuntimeError("ERROR: the complete config file does not exist ",
                           complete_config)

    # --------------------------------------------------------------------
    # MAKE TEMPORARY DIRECTORY & COPY THE NECESSARY FILES THERE
    tmp = tempfile.mkdtemp()
    tmp_autograding = os.path.join(tmp, "TMP_AUTOGRADING")
    os.mkdir(tmp_autograding)
    tmp_submission = os.path.join(tmp, "TMP_SUBMISSION")
    os.mkdir(tmp_submission)

    copytree_if_exists(provided_code_path,
                       os.path.join(tmp_autograding, "provided_code"))
    copytree_if_exists(instructor_solution_path,
                       os.path.join(tmp_autograding, "instructor_solution"))
    copytree_if_exists(test_input_path,
                       os.path.join(tmp_autograding, "test_input"))
    copytree_if_exists(test_output_path,
                       os.path.join(tmp_autograding, "test_output"))
    copytree_if_exists(custom_validation_code_path,
                       os.path.join(tmp_autograding, "custom_validation_code"))
    copytree_if_exists(bin_path, os.path.join(tmp_autograding, "bin"))
    shutil.copy(form_json_config, os.path.join(tmp_autograding, "form.json"))
    shutil.copy(complete_config,
                os.path.join(tmp_autograding, "complete_config.json"))

    checkout_path = os.path.join(SUBMITTY_DATA_DIR, "courses", obj["semester"],
                                 obj["course"], "checkout", partial_path)
    results_path = os.path.join(SUBMITTY_DATA_DIR, "courses", obj["semester"],
                                obj["course"], "results", partial_path)

    # grab a copy of the current history.json file (if it exists)
    history_file = os.path.join(results_path, "history.json")
    history_file_tmp = ""
    if os.path.isfile(history_file):
        shutil.copy(history_file, os.path.join(tmp_submission, "history.json"))
    # get info from the gradeable config file
    with open(complete_config, 'r') as infile:
        complete_config_obj = json.load(infile)

    checkout_subdirectory = complete_config_obj["autograding"].get(
        "use_checkout_subdirectory", "")
    checkout_subdir_path = os.path.join(checkout_path, checkout_subdirectory)
    queue_file = os.path.join(next_directory, next_to_grade)

    # switch to tmp directory
    os.chdir(tmp)

    # make the logs directory
    tmp_logs = os.path.join(tmp, "TMP_SUBMISSION", "tmp_logs")
    os.makedirs(tmp_logs)
    # 'touch' a file in the logs folder
    open(os.path.join(tmp_logs, "overall.txt"), 'a').close()

    # --------------------------------------------------------------------
    # CONFIRM WE HAVE A CHECKOUT OF THE STUDENT'S REPO
    if is_vcs:
        # there should be a checkout log file in the results directory
        # move that file to the tmp logs directory..
        vcs_checkout_logfile = os.path.join(results_path, "logs",
                                            "vcs_checkout.txt")
        if os.path.isfile(vcs_checkout_logfile):
            shutil.move(vcs_checkout_logfile, tmp_logs)
        else:
            grade_items_logging.log_message(
                job_id,
                message="ERROR: missing vcs_checkout.txt logfile " +
                str(vcs_checkout_logfile))

    copytree_if_exists(submission_path,
                       os.path.join(tmp_submission, "submission"))
    copytree_if_exists(checkout_path, os.path.join(tmp_submission, "checkout"))
    obj["queue_time"] = queue_time_longstring
    obj["regrade"] = is_batch_job
    obj["waittime"] = waittime
    obj["job_id"] = job_id

    with open(os.path.join(tmp_submission, "queue_file.json"), 'w') as outfile:
        json.dump(obj,
                  outfile,
                  sort_keys=True,
                  indent=4,
                  separators=(',', ': '))

    grading_began_longstring = dateutils.write_submitty_date(grading_began)
    with open(os.path.join(tmp_submission, ".grading_began"), 'w') as f:
        print(grading_began_longstring, file=f)

    # zip up autograding & submission folders
    filehandle1, my_autograding_zip_file = tempfile.mkstemp()
    filehandle2, my_submission_zip_file = tempfile.mkstemp()
    grade_item.zip_my_directory(tmp_autograding, my_autograding_zip_file)
    grade_item.zip_my_directory(tmp_submission, my_submission_zip_file)
    os.close(filehandle1)
    os.close(filehandle2)
    # cleanup
    shutil.rmtree(tmp_autograding)
    shutil.rmtree(tmp_submission)
    shutil.rmtree(tmp)

    #grade_items_logging.log_message(job_id,is_batch_job,"done zip",item_name)

    return (my_autograding_zip_file, my_submission_zip_file)
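zip_my_directory (called here through the grade_item module) is likewise defined elsewhere. A plausible minimal version using the standard zipfile module, storing each entry relative to the zipped directory so the archive unpacks in place, is:

import os
import zipfile

def zip_my_directory(path, zipfilename):
    # Walk the tree and add every file under a path relative to `path`.
    with zipfile.ZipFile(zipfilename, 'w', zipfile.ZIP_DEFLATED) as zf:
        for root, _dirs, files in os.walk(path):
            for name in files:
                full_path = os.path.join(root, name)
                zf.write(full_path, os.path.relpath(full_path, path))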
Example no. 27
def archive_autograding_results(working_directory, job_id, which_untrusted, is_batch_job, complete_config_obj, 
                                gradeable_config_obj, queue_obj, log_path, stack_trace_log_path, is_test_environment):
    """ After grading is finished, archive the results. """

    tmp_autograding = os.path.join(working_directory,"TMP_AUTOGRADING")
    tmp_submission = os.path.join(working_directory,"TMP_SUBMISSION")
    tmp_work = os.path.join(working_directory,"TMP_WORK")
    tmp_logs = os.path.join(working_directory,"TMP_SUBMISSION","tmp_logs")
    tmp_results = os.path.join(working_directory,"TMP_RESULTS")
    submission_path = os.path.join(tmp_submission, "submission")
    random_output_path = os.path.join(tmp_work, 'random_output')

    partial_path = os.path.join(queue_obj["gradeable"],queue_obj["who"],str(queue_obj["version"]))
    item_name = os.path.join(queue_obj["semester"],queue_obj["course"],"submissions",partial_path)
    results_public_dir = os.path.join(tmp_results,"results_public")
    results_details_dir = os.path.join(tmp_results, "details")
    patterns = complete_config_obj['autograding']

    # Copy work to details
    pattern_copy("work_to_details", patterns['work_to_details'], tmp_work, results_details_dir, tmp_logs)
    
    # Copy work to public
    if 'work_to_public' in patterns:
        pattern_copy("work_to_public", patterns['work_to_public'], tmp_work, results_public_dir, tmp_logs)

    if os.path.exists(random_output_path):
        pattern_copy("work_to_random_output", [os.path.join(random_output_path, 'test*', '**', '*.txt'),], tmp_work, tmp_results, tmp_logs)
    # grab the submission time
    with open(os.path.join(tmp_submission, 'submission' ,".submit.timestamp"), 'r') as submission_time_file:
        submission_string = submission_time_file.read().rstrip()

    history_file_tmp = os.path.join(tmp_submission,"history.json")
    history_file = os.path.join(tmp_results,"history.json")
    if os.path.isfile(history_file_tmp) and not is_test_environment:

        from . import CONFIG_PATH
        with open(os.path.join(CONFIG_PATH, 'submitty_users.json')) as open_file:
            OPEN_JSON = json.load(open_file)
        DAEMON_UID = OPEN_JSON['daemon_uid']

        shutil.move(history_file_tmp, history_file)
        # fix permissions
        ta_group_id = os.stat(tmp_results).st_gid
        os.chown(history_file, int(DAEMON_UID),ta_group_id)
        add_permissions(history_file, stat.S_IRGRP)
    grading_finished = dateutils.get_current_time()


    try:
        shutil.copy(os.path.join(tmp_work, "grade.txt"), tmp_results)
    except Exception:
        with open(os.path.join(tmp_logs,"overall.txt"),'a') as f:
            print("\n\nERROR: Grading incomplete -- Could not copy ",os.path.join(tmp_work,"grade.txt"), file=f)
        log_message(log_path, job_id, is_batch_job, which_untrusted, item_name, message="ERROR: grade.txt does not exist")
        log_stack_trace(stack_trace_log_path, job_id, is_batch_job, which_untrusted, item_name, trace=traceback.format_exc())

    grade_result = ""
    try:
        with open(os.path.join(tmp_work,"grade.txt")) as f:
            lines = f.readlines()
            for line in lines:
                line = line.rstrip('\n')
                if line.startswith("Automatic grading total:"):
                    grade_result = line
    except Exception:
        with open(os.path.join(tmp_logs,"overall.txt"),'a') as f:
            print("\n\nERROR: Grading incomplete -- Could not open ",os.path.join(tmp_work,"grade.txt"), file=f)
        log_message(log_path, job_id, is_batch_job, which_untrusted, item_name, message="ERROR: grade.txt does not exist")
        log_stack_trace(stack_trace_log_path, job_id, is_batch_job, which_untrusted, item_name, trace=traceback.format_exc())


    gradeable_deadline_string = gradeable_config_obj["date_due"]
    
    submission_datetime = dateutils.read_submitty_date(submission_string)
    gradeable_deadline_datetime = dateutils.read_submitty_date(gradeable_deadline_string)
    gradeable_deadline_longstring = dateutils.write_submitty_date(gradeable_deadline_datetime)
    submission_longstring = dateutils.write_submitty_date(submission_datetime)
    seconds_late = int((submission_datetime-gradeable_deadline_datetime).total_seconds())

    # note: negative = not late
    grading_finished_longstring = dateutils.write_submitty_date(grading_finished)

    with open(os.path.join(tmp_submission,".grading_began"), 'r') as f:
        grading_began_longstring = f.read()
    grading_began = dateutils.read_submitty_date(grading_began_longstring)

    gradingtime = (grading_finished - grading_began).total_seconds()

    queue_obj["gradingtime"]=gradingtime
    queue_obj["grade_result"]=grade_result
    queue_obj["which_untrusted"]=which_untrusted
    waittime = queue_obj["waittime"]

    with open(os.path.join(tmp_results,"queue_file.json"),'w') as outfile:
        json.dump(queue_obj,outfile,sort_keys=True,indent=4,separators=(',', ': '))

    try:
        shutil.move(os.path.join(tmp_work, "results.json"), os.path.join(tmp_results, "results.json"))
    except Exception:
        with open(os.path.join(tmp_logs,"overall.txt"),'a') as f:
            print("\n\nERROR: Grading incomplete -- Could not open/write ",os.path.join(tmp_work,"results.json"), file=f)
        log_message(log_path, job_id, is_batch_job, which_untrusted, item_name, message="ERROR: results.json read/write error")
        log_stack_trace(stack_trace_log_path, job_id, is_batch_job, which_untrusted, item_name, trace=traceback.format_exc())

    just_write_grade_history(history_file,
                             gradeable_deadline_longstring,
                             submission_longstring,
                             seconds_late,
                             queue_obj["queue_time"],
                             "BATCH" if is_batch_job else "INTERACTIVE",
                             grading_began_longstring,
                             int(waittime),
                             grading_finished_longstring,
                             int(gradingtime),
                             grade_result,
                             queue_obj.get("revision", None))

    with open(os.path.join(tmp_logs,"overall.txt"),'a') as f:
        f.write("FINISHED GRADING!\n")

    # save the logs!
    shutil.copytree(tmp_logs,os.path.join(tmp_results,"logs"))
    log_message(log_path, job_id,is_batch_job,which_untrusted,item_name,"grade:",gradingtime,grade_result)
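pattern_copy drives the work-to-details and work-to-public steps above. The sketch below shows the behavior its call sites imply (copy files matching each glob pattern, preserving their paths relative to the source, and note each copy in the overall log); the exact logging format is an assumption:

import glob
import os
import shutil

def pattern_copy(label, patterns, source, target, tmp_logs):
    # Copy every file matching the glob patterns from source to target,
    # preserving the directory structure relative to source.
    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        print(label, "pattern copy", patterns, file=f)
        for pattern in patterns:
            for match in glob.glob(os.path.join(source, pattern),
                                   recursive=True):
                if not os.path.isfile(match):
                    continue
                relative = os.path.relpath(match, source)
                destination = os.path.join(target, relative)
                os.makedirs(os.path.dirname(destination), exist_ok=True)
                shutil.copy(match, destination)
                print("  copied", relative, file=f)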
Example no. 28
def just_grade_item(next_directory,next_to_grade,which_untrusted):

    my_pid = os.getpid()

    # verify the hwcron user is running this script
    if not int(os.getuid()) == int(HWCRON_UID):
        grade_items_logging.log_message("ERROR: must be run by hwcron")
        raise SystemExit("ERROR: the grade_item.py script must be run by the hwcron user")

    # --------------------------------------------------------
    # figure out what we're supposed to grade & error checking
    obj = get_submission_path(next_directory,next_to_grade)
    submission_path = os.path.join(SUBMITTY_DATA_DIR,"courses",obj["semester"],obj["course"],
                                   "submissions",obj["gradeable"],obj["who"],str(obj["version"]))
    if not os.path.isdir(submission_path):
        grade_items_logging.log_message("ERROR: the submission directory does not exist" + submission_path)
        raise SystemExit("ERROR: the submission directory does not exist",submission_path)
    print ("pid",my_pid,"GRADE THIS", submission_path)

    is_vcs,vcs_type,vcs_base_url,vcs_subdirectory = get_vcs_info(SUBMITTY_DATA_DIR,obj["semester"],obj["course"],obj["gradeable"],obj["who"],obj["team"])

    is_batch_job = next_directory==BATCH_QUEUE
    is_batch_job_string = "BATCH" if is_batch_job else "INTERACTIVE"

    queue_time = get_queue_time(next_directory,next_to_grade)
    queue_time_longstring = dateutils.write_submitty_date(queue_time)
    grading_began = dateutils.get_current_time()
    waittime = int((grading_began-queue_time).total_seconds())
    grade_items_logging.log_message(is_batch_job,which_untrusted,submission_path,"wait:",waittime,"")

    # --------------------------------------------------------
    # various paths
    provided_code_path = os.path.join(SUBMITTY_DATA_DIR,"courses",obj["semester"],obj["course"],"provided_code",obj["gradeable"])
    test_input_path = os.path.join(SUBMITTY_DATA_DIR,"courses",obj["semester"],obj["course"],"test_input",obj["gradeable"])
    test_output_path = os.path.join(SUBMITTY_DATA_DIR,"courses",obj["semester"],obj["course"],"test_output",obj["gradeable"])
    custom_validation_code_path = os.path.join(SUBMITTY_DATA_DIR,"courses",obj["semester"],obj["course"],"custom_validation_code",obj["gradeable"])
    bin_path = os.path.join(SUBMITTY_DATA_DIR,"courses",obj["semester"],obj["course"],"bin")

    checkout_path = os.path.join(SUBMITTY_DATA_DIR,"courses",obj["semester"],obj["course"],"checkout",obj["gradeable"],obj["who"],str(obj["version"]))
    results_path = os.path.join(SUBMITTY_DATA_DIR,"courses",obj["semester"],obj["course"],"results",obj["gradeable"],obj["who"],str(obj["version"]))

    # grab a copy of the current history.json file (if it exists)
    history_file = os.path.join(results_path,"history.json")
    history_file_tmp = ""
    if os.path.isfile(history_file):
        filehandle,history_file_tmp = tempfile.mkstemp()
        os.close(filehandle)  # close the descriptor from mkstemp to avoid a leak
        shutil.copy(history_file,history_file_tmp)

    # get info from the gradeable config file
    json_config = os.path.join(SUBMITTY_DATA_DIR,"courses",obj["semester"],obj["course"],"config","form","form_"+obj["gradeable"]+".json")
    with open(json_config, 'r') as infile:
        gradeable_config_obj = json.load(infile)

    # get info from the gradeable config file
    complete_config = os.path.join(SUBMITTY_DATA_DIR,"courses",obj["semester"],obj["course"],"config","complete_config","complete_config_"+obj["gradeable"]+".json")
    with open(complete_config, 'r') as infile:
        complete_config_obj = json.load(infile)

    checkout_subdirectory = complete_config_obj["autograding"].get("use_checkout_subdirectory","")
    checkout_subdir_path = os.path.join(checkout_path,checkout_subdirectory)

    # --------------------------------------------------------------------
    # MAKE TEMPORARY DIRECTORY & COPY THE NECESSARY FILES THERE
    tmp = os.path.join("/var/local/submitty/autograding_tmp/",which_untrusted,"tmp")
    shutil.rmtree(tmp,ignore_errors=True)
    os.makedirs(tmp)
    
    # switch to tmp directory
    os.chdir(tmp)

    # make the logs directory
    tmp_logs = os.path.join(tmp,"tmp_logs")
    os.makedirs(tmp_logs)

    # grab the submission time
    with open (os.path.join(submission_path,".submit.timestamp")) as submission_time_file:
        submission_string = submission_time_file.read().rstrip()
    
    submission_datetime = dateutils.read_submitty_date(submission_string)


    # --------------------------------------------------------------------
    # CHECKOUT THE STUDENT's REPO
    if is_vcs:
        with open(os.path.join(tmp_logs,"overall.txt"),'a') as f:
            print ("====================================\nVCS CHECKOUT", file=f)
            print ("vcs_subdirectory",vcs_subdirectory, file=f)
        # cleanup the previous checkout (if it exists)
        shutil.rmtree(checkout_path,ignore_errors=True)
        os.makedirs(checkout_path, exist_ok=True)
        subprocess.call (['/usr/bin/git', 'clone', vcs_subdirectory, checkout_path])
        os.chdir(checkout_path)

        # determine which version we need to checkout
        what_version = subprocess.check_output(['git', 'rev-list', '-n', '1', '--before="'+submission_string+'"', 'master'])
        what_version = str(what_version.decode('utf-8')).rstrip()
        if what_version == "":
            # oops, pressed the grade button before a valid commit
            shutil.rmtree(checkout_path,ignore_errors=True)
        else:
            # and check out the right version
            subprocess.call (['git', 'checkout', '-b', 'grade', what_version])
        os.chdir(tmp)
        subprocess.call(['ls', '-lR', checkout_path], stdout=open(tmp_logs + "/overall.txt", 'a'))


    # --------------------------------------------------------------------
    # START DOCKER

    container = None
    if USE_DOCKER:
        container = subprocess.check_output(['docker', 'run', '-t', '-d',
                                             '-v', tmp + ':' + tmp,
                                             'ubuntu:custom']).decode('utf8').strip()

    # --------------------------------------------------------------------
    # COMPILE THE SUBMITTED CODE

    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        print("====================================\nCOMPILATION STARTS", file=f)
    
    # copy submitted files to the tmp compilation directory
    tmp_compilation = os.path.join(tmp,"TMP_COMPILATION")
    os.mkdir(tmp_compilation)
    os.chdir(tmp_compilation)
    
    gradeable_deadline_string = gradeable_config_obj["date_due"]
    
    patterns_submission_to_compilation = complete_config_obj["autograding"]["submission_to_compilation"]
    pattern_copy("submission_to_compilation",patterns_submission_to_compilation,submission_path,tmp_compilation,tmp_logs)
    if is_vcs:
        pattern_copy("checkout_to_compilation",patterns_submission_to_compilation,checkout_subdir_path,tmp_compilation,tmp_logs)
    
    # copy any instructor provided code files to tmp compilation directory
    copy_contents_into(provided_code_path,tmp_compilation,tmp_logs)

    subprocess.call(['ls', '-lR', '.'], stdout=open(tmp_logs + "/overall.txt", 'a'))

    # copy compile.out to the current directory
    shutil.copy (os.path.join(bin_path,obj["gradeable"],"compile.out"),os.path.join(tmp_compilation,"my_compile.out"))

    # give the untrusted user read/write/execute permissions on the tmp directory & files
    add_permissions_recursive(tmp_compilation,
                              stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP,
                              stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP,
                              stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP)

    add_permissions(tmp,stat.S_IROTH | stat.S_IXOTH)
    add_permissions(tmp_logs,stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)

    with open(os.path.join(tmp_logs,"compilation_log.txt"), 'w') as logfile:
        if USE_DOCKER:
            compile_success = subprocess.call(['docker', 'exec', '-w', tmp_compilation, container,
                                               os.path.join(tmp_compilation, 'my_compile.out'), obj['gradeable'],
                                               obj['who'], str(obj['version']), submission_string], stdout=logfile)
        else:
            compile_success = subprocess.call([os.path.join(SUBMITTY_INSTALL_DIR,"bin","untrusted_execute"),
                                               which_untrusted,
                                               os.path.join(tmp_compilation,"my_compile.out"),
                                               obj["gradeable"],
                                               obj["who"],
                                               str(obj["version"]),
                                               submission_string],
                                              stdout=logfile)

    if compile_success == 0:
        print ("pid",my_pid,"COMPILATION OK")
    else:
        print ("pid",my_pid,"COMPILATION FAILURE")
        grade_items_logging.log_message(is_batch_job,which_untrusted,submission_path,"","","COMPILATION FAILURE")
    #raise SystemExit()

    untrusted_grant_rwx_access(which_untrusted,tmp_compilation)
        
    # remove the compilation program
    os.remove(os.path.join(tmp_compilation,"my_compile.out"))

    # return to the main tmp directory
    os.chdir(tmp)


    # --------------------------------------------------------------------
    # make the runner directory

    with open(os.path.join(tmp_logs,"overall.txt"),'a') as f:
        print ("====================================\nRUNNER STARTS", file=f)
        
    tmp_work = os.path.join(tmp,"TMP_WORK")
    os.makedirs(tmp_work)
    os.chdir(tmp_work)

    # move all executable files from the compilation directory to the main tmp directory
    # Note: Must preserve the directory structure of compiled files (esp for Java)

    patterns_submission_to_runner = complete_config_obj["autograding"]["submission_to_runner"]
    pattern_copy("submission_to_runner",patterns_submission_to_runner,submission_path,tmp_work,tmp_logs)
    if is_vcs:
        pattern_copy("checkout_to_runner",patterns_submission_to_runner,checkout_subdir_path,tmp_work,tmp_logs)

    patterns_compilation_to_runner = complete_config_obj["autograding"]["compilation_to_runner"]
    pattern_copy("compilation_to_runner",patterns_compilation_to_runner,tmp_compilation,tmp_work,tmp_logs)
        
    # copy input files to tmp_work directory
    copy_contents_into(test_input_path,tmp_work,tmp_logs)

    subprocess.call(['ls', '-lR', '.'], stdout=open(tmp_logs + "/overall.txt", 'a'))

    # copy runner.out to the current directory
    shutil.copy (os.path.join(bin_path,obj["gradeable"],"run.out"),os.path.join(tmp_work,"my_runner.out"))

    # give the untrusted user read/write/execute permissions on the tmp directory & files
    add_permissions_recursive(tmp_work,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)

    # raise SystemExit()
    # run the run.out as the untrusted user
    with open(os.path.join(tmp_logs,"runner_log.txt"), 'w') as logfile:
        if USE_DOCKER:
            runner_success = subprocess.call(['docker', 'exec', '-w', tmp_work, container,
                                              os.path.join(tmp_work, 'my_runner.out'), obj['gradeable'],
                                              obj['who'], str(obj['version']), submission_string], stdout=logfile)
        else:
            runner_success = subprocess.call([os.path.join(SUBMITTY_INSTALL_DIR,"bin","untrusted_execute"),
                                              which_untrusted,
                                              os.path.join(tmp_work,"my_runner.out"),
                                              obj["gradeable"],
                                              obj["who"],
                                              str(obj["version"]),
                                              submission_string],
                                              stdout=logfile)

    if runner_success == 0:
        print ("pid",my_pid,"RUNNER OK")
    else:
        print ("pid",my_pid,"RUNNER FAILURE")
        grade_items_logging.log_message(is_batch_job,which_untrusted,submission_path,"","","RUNNER FAILURE")

    untrusted_grant_rwx_access(which_untrusted,tmp_work)
    untrusted_grant_rwx_access(which_untrusted,tmp_compilation)

    # --------------------------------------------------------------------
    # RUN VALIDATOR

    with open(os.path.join(tmp_logs,"overall.txt"),'a') as f:
        print ("====================================\nVALIDATION STARTS", file=f)

    # copy results files from compilation...
    patterns_submission_to_validation = complete_config_obj["autograding"]["submission_to_validation"]
    pattern_copy("submission_to_validation",patterns_submission_to_validation,submission_path,tmp_work,tmp_logs)
    if is_vcs:
        pattern_copy("checkout_to_validation",patterns_submission_to_validation,checkout_subdir_path,tmp_work,tmp_logs)
    patterns_compilation_to_validation = complete_config_obj["autograding"]["compilation_to_validation"]
    pattern_copy("compilation_to_validation",patterns_compilation_to_validation,tmp_compilation,tmp_work,tmp_logs)

    # remove the compilation directory
    shutil.rmtree(tmp_compilation)

    # copy output files to tmp_work directory
    copy_contents_into(test_output_path,tmp_work,tmp_logs)

    # copy any instructor custom validation code into the tmp work directory
    copy_contents_into(custom_validation_code_path,tmp_work,tmp_logs)

    subprocess.call(['ls', '-lR', '.'], stdout=open(tmp_logs + "/overall.txt", 'a'))

    # copy validator.out to the current directory
    shutil.copy (os.path.join(bin_path,obj["gradeable"],"validate.out"),os.path.join(tmp_work,"my_validator.out"))

    # give the untrusted user read/write/execute permissions on the tmp directory & files
    add_permissions_recursive(tmp_work,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)

    add_permissions(os.path.join(tmp_work,"my_validator.out"),stat.S_IROTH | stat.S_IXOTH)

    # run the validator.out as the untrusted user
    with open(os.path.join(tmp_logs,"validator_log.txt"), 'w') as logfile:
        if USE_DOCKER:
            validator_success = subprocess.call(['docker', 'exec', '-w', tmp_work, container,
                                                 os.path.join(tmp_work, 'my_validator.out'), obj['gradeable'],
                                                 obj['who'], str(obj['version']), submission_string], stdout=logfile)
        else:
            validator_success = subprocess.call([os.path.join(SUBMITTY_INSTALL_DIR,"bin","untrusted_execute"),
                                                 which_untrusted,
                                                 os.path.join(tmp_work,"my_validator.out"),
                                                 obj["gradeable"],
                                                 obj["who"],
                                                 str(obj["version"]),
                                                 submission_string],
                                                stdout=logfile)

    if validator_success == 0:
        print ("pid",my_pid,"VALIDATOR OK")
    else:
        print ("pid",my_pid,"VALIDATOR FAILURE")
        grade_items_logging.log_message(is_batch_job,which_untrusted,submission_path,"","","VALIDATION FAILURE")

    untrusted_grant_rwx_access(which_untrusted,tmp_work)

    # grab the result of autograding
    grade_result = ""
    with open(os.path.join(tmp_work,"grade.txt")) as f:
        lines = f.readlines()
        for line in lines:
            line = line.rstrip('\n')
            if line.startswith("Automatic grading total:"):
                grade_result = line

    # --------------------------------------------------------------------
    # MAKE RESULTS DIRECTORY & COPY ALL THE FILES THERE

    with open(os.path.join(tmp_logs,"overall.txt"),'a') as f:
        print ("====================================\nARCHIVING STARTS", file=f)

    subprocess.call(['ls', '-lR', '.'], stdout=open(tmp_logs + "/overall.txt", 'a'))

    os.chdir(bin_path)

    # save the old results path!
    if os.path.isdir(os.path.join(results_path,"OLD")):
        shutil.move(os.path.join(results_path,"OLD"),
                    os.path.join(tmp,"OLD_RESULTS"))

    # clean out all of the old files if this is a re-run
    shutil.rmtree(results_path,ignore_errors=True)

    # create the directory (and the full path if it doesn't already exist)
    os.makedirs(results_path)

    # bring back the old results!
    if os.path.isdir(os.path.join(tmp,"OLD_RESULTS")):
        shutil.move(os.path.join(tmp,"OLD_RESULTS"),
                    os.path.join(results_path,"OLD"))

    os.makedirs(os.path.join(results_path,"details"))

    patterns_work_to_details = complete_config_obj["autograding"]["work_to_details"]
    pattern_copy("work_to_details",patterns_work_to_details,tmp_work,os.path.join(results_path,"details"),tmp_logs)

    if not history_file_tmp == "":
        shutil.move(history_file_tmp,history_file)
        # fix permissions
        ta_group_id = os.stat(results_path).st_gid
        os.chown(history_file,int(HWCRON_UID),ta_group_id)
        add_permissions(history_file,stat.S_IRGRP)
        
    grading_finished = dateutils.get_current_time()

    shutil.copy(os.path.join(tmp_work,"results.json"),results_path)
    shutil.copy(os.path.join(tmp_work,"grade.txt"),results_path)

    # -------------------------------------------------------------
    # create/append to the results history

    gradeable_deadline_datetime = dateutils.read_submitty_date(gradeable_deadline_string)
    gradeable_deadline_longstring = dateutils.write_submitty_date(gradeable_deadline_datetime)
    submission_longstring = dateutils.write_submitty_date(submission_datetime)
    
    seconds_late = int((submission_datetime-gradeable_deadline_datetime).total_seconds())
    # note: negative = not late

    grading_began_longstring = dateutils.write_submitty_date(grading_began)
    grading_finished_longstring = dateutils.write_submitty_date(grading_finished)

    gradingtime = int((grading_finished-grading_began).total_seconds())

    write_grade_history.just_write_grade_history(history_file,
                                                 gradeable_deadline_longstring,
                                                 submission_longstring,
                                                 seconds_late,
                                                 queue_time_longstring,
                                                 is_batch_job_string,
                                                 grading_began_longstring,
                                                 waittime,
                                                 grading_finished_longstring,
                                                 gradingtime,
                                                 grade_result)

    #---------------------------------------------------------------------
    # WRITE OUT VERSION DETAILS
    if WRITE_DATABASE:
        insert_database_version_data.insert_to_database(
            obj["semester"],
            obj["course"],
            obj["gradeable"],
            obj["user"],
            obj["team"],
            obj["who"],
            True if obj["is_team"] else False,
            str(obj["version"]))

    print ("pid",my_pid,"finished grading ", next_to_grade, " in ", gradingtime, " seconds")

    grade_items_logging.log_message(is_batch_job,which_untrusted,submission_path,"grade:",gradingtime,grade_result)

    with open(os.path.join(tmp_logs,"overall.txt"),'a') as f:
        f.write("FINISHED GRADING!")

    # save the logs!
    shutil.copytree(tmp_logs,os.path.join(results_path,"logs"))

    # --------------------------------------------------------------------
    # REMOVE TEMP DIRECTORY
    shutil.rmtree(tmp)

    # --------------------------------------------------------------------
    # CLEAN UP DOCKER
    if USE_DOCKER:
        subprocess.call(['docker', 'rm', '-f', container])
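write_grade_history.just_write_grade_history appends one record per grading run to history.json. The sketch below matches the call sites in Examples 27 and 28; the JSON field names are illustrative assumptions, not the verified schema:

import json
import os

def just_write_grade_history(history_file, assignment_deadline, submission_time,
                             seconds_late, queue_time, batch_regrade,
                             grading_began, wait_time, grading_finished,
                             grade_time, autograde_total, revision=None):
    # Read the existing history (if any), append one record, write it back.
    history = []
    if os.path.isfile(history_file):
        with open(history_file) as infile:
            history = json.load(infile)
    record = {
        "assignment_deadline": assignment_deadline,
        "submission_time": submission_time,
        "seconds_late": seconds_late,
        "queue_time": queue_time,
        "batch_regrade": batch_regrade,
        "grading_began": grading_began,
        "wait_time": wait_time,
        "grading_finished": grading_finished,
        "grade_time": grade_time,
        "autograde_total": autograde_total,
    }
    if revision is not None:
        record["revision"] = revision
    history.append(record)
    with open(history_file, 'w') as outfile:
        json.dump(history, outfile, indent=4)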
Example no. 29
def get_job(my_name,which_machine,my_capabilities,which_untrusted,overall_lock):
    """
    Picks a job from the queue

    :param overall_lock: a lock on the directory containing all queue files
    """

    time_get_job_begin = dateutils.get_current_time()

    overall_lock.acquire()
    folder= INTERACTIVE_QUEUE


    # ----------------------------------------------------------------
    # Our first priority is to perform any awaiting VCS checkouts

    # Note: This design is imperfect:
    #
    #   * If all shippers are busy working on long-running autograding
    #     tasks there will be a delay of seconds or minutes between
    #     a student pressing the submission button and clone happening.
    #     This is a minor exploit allowing them to theoretically
    #     continue working on their submission past the deadline for
    #     the time period of the delay.
    #     -- This is not a significant, practical problem.
    #
    #   * If multiple and/or large git submissions arrive close
    #     together, this shipper job will be tied up performing these
    #     clone operations.  Because we don't release the lock, any
    #     other shippers that complete their work will also be blocked
    #     from either helping with the clones or tackling the next
    #     autograding job.
    #     -- Based on experience with actual submission patterns, we
    #        do not anticipate that this will be a significant
    #        bottleneck at this time.
    #
    #   * If a git clone takes a very long time and/or hangs because of
    #     network problems, this could halt all work on the server.
    #     -- We'll need to monitor the production server.
    #
    # We plan to do a complete overhaul of the
    # scheduler/shipper/worker and refactoring this design should be
    # part of the project.

    # Grab all the VCS files currently in the folder...
    vcs_files = [str(f) for f in Path(folder).glob('VCS__*')]
    for f in vcs_files:
        vcs_file = f[len(folder)+1:]
        no_vcs_file = f[len(folder)+1+5:]
        # do the checkout
        updated_obj = checkout_vcs_repo(folder+"/"+vcs_file)
        # save the regular grading queue file
        with open(os.path.join(folder,no_vcs_file), "w") as queue_file:
            json.dump(updated_obj, queue_file)
        # cleanup the vcs queue file
        os.remove(folder+"/"+vcs_file)
    # ----------------------------------------------------------------


    # Grab all the files currently in the folder, sorted by creation
    # time, and put them in the queue to be graded
    files = [str(f) for f in Path(folder).glob('*')]
    files_and_times = list()
    for f in files:
        try:
            my_time = os.path.getctime(f)
        except:
            continue
        tup = (f, my_time)
        files_and_times.append(tup)

    files_and_times = sorted(files_and_times, key=operator.itemgetter(1))
    my_job=""

    for full_path_file, file_time in files_and_times:
        # get the file name (without the path)
        just_file = full_path_file[len(folder)+1:]
        # skip items that are already being graded
        if (just_file[0:8]=="GRADING_"):
            continue
        grading_file = os.path.join(folder,"GRADING_"+just_file)
        if grading_file in files:
            continue

        # found something to do
        try:
            with open(full_path_file, 'r') as infile:
                queue_obj = json.load(infile)
        except:
            continue

        #Check to make sure that we are capable of grading this submission
        required_capabilities = queue_obj["required_capabilities"]
        if not required_capabilities in my_capabilities:
            continue

        # prioritize interactive jobs over (batch) regrades
        # if you've found an interactive job, exit early (since they are sorted by timestamp)
        if not "regrade" in queue_obj or not queue_obj["regrade"]:
            my_job = just_file
            break

        # otherwise it's a regrade, and if we don't already have a
        # job, take it, but we have to search the rest of the list
        if my_job == "":
            my_job = just_file

    if not my_job == "":
        grading_file = os.path.join(folder, "GRADING_" + my_job)
        # create the grading file
        with open(os.path.join(grading_file), "w") as queue_file:
            json.dump({"untrusted": which_untrusted}, queue_file)

    overall_lock.release()

    time_get_job_end = dateutils.get_current_time()

    time_delta = time_get_job_end-time_get_job_begin
    if time_delta > datetime.timedelta(milliseconds=100):
        print (my_name, " WARNING: submitty_autograding shipper get_job time ", time_delta)
        grade_items_logging.log_message(JOB_ID, message=str(my_name)+" WARNING: submitty_autograding shipper get_job time "+str(time_delta))

    return (my_job)
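For orientation, the queue files scanned by get_job are small JSON objects. A hypothetical example containing only the fields this excerpt actually reads (every value below is invented for illustration):

import json

example_queue_obj = {
    "semester": "f19",
    "course": "csci1100",
    "gradeable": "hw01",
    "who": "smithj",
    "team": "",
    "version": 3,
    "required_capabilities": "default",
    "regrade": False,   # absent or False => treated as an interactive job
}
# hypothetical queue file name; the real naming scheme is not shown here
with open("hw01__smithj__3", "w") as queue_file:
    json.dump(example_queue_obj, queue_file, indent=4)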
Example no. 30
def prepare_autograding_and_submission_zip(
    which_machine,
    which_untrusted,
    next_directory,
    next_to_grade
):
    os.chdir(SUBMITTY_DATA_DIR)

    # generate a random id to be used to track this job in the autograding logs
    job_id = ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(6))

    # --------------------------------------------------------
    # figure out what we're supposed to grade & error checking
    obj = load_queue_file_obj(job_id, next_directory, next_to_grade)
    # The top level course directory for this class
    course_dir = os.path.join(SUBMITTY_DATA_DIR, 'courses', obj["semester"], obj["course"])
    if "generate_output" not in obj:
        partial_path = os.path.join(obj["gradeable"], obj["who"], str(obj["version"]))
        item_name = os.path.join(obj["semester"], obj["course"], "submissions", partial_path)
        submission_path = os.path.join(SUBMITTY_DATA_DIR, "courses", item_name)
        if not os.path.isdir(submission_path):
            autograding_utils.log_message(
                AUTOGRADING_LOG_PATH,
                job_id,
                message=f"ERROR: the submission directory does not exist {submission_path}"
            )
            raise RuntimeError("ERROR: the submission directory does not exist", submission_path)
        print(which_machine, which_untrusted, "prepare zip", submission_path)
        is_vcs, vcs_type, vcs_base_url, vcs_subdirectory = get_vcs_info(
            SUBMITTY_DATA_DIR,
            obj["semester"],
            obj["course"],
            obj["gradeable"],
            obj["who"],
            obj["team"]
        )

    elif obj["generate_output"]:
        item_name = os.path.join(
            obj["semester"],
            obj["course"],
            "generated_output",
            obj["gradeable"]
        )

    is_batch_job = "regrade" in obj and obj["regrade"]

    queue_time = get_queue_time(next_directory, next_to_grade)
    grading_began = dateutils.get_current_time()
    waittime = (grading_began-queue_time).total_seconds()
    autograding_utils.log_message(
        AUTOGRADING_LOG_PATH,
        job_id,
        is_batch_job,
        "zip",
        item_name,
        "wait:",
        waittime,
        ""
    )

    # --------------------------------------------------------
    # various paths

    provided_code_path = os.path.join(course_dir, "provided_code", obj["gradeable"])
    instructor_solution_path = os.path.join(course_dir, "instructor_solution", obj["gradeable"])
    test_input_path = os.path.join(course_dir, "test_input", obj["gradeable"])
    test_output_path = os.path.join(course_dir, "test_output", obj["gradeable"])
    bin_path = os.path.join(course_dir, "bin", obj["gradeable"])
    form_json_config = os.path.join(course_dir, "config", "form", f"form_{obj['gradeable']}.json")
    custom_validation_code_path = os.path.join(
        course_dir,
        "custom_validation_code",
        obj["gradeable"]
    )
    generated_output_path = os.path.join(
        course_dir,
        "generated_output",
        obj["gradeable"],
        "random_output"
    )
    complete_config = os.path.join(
        course_dir,
        "config",
        "complete_config",
        f"complete_config_{obj['gradeable']}.json"
    )

    if not os.path.exists(form_json_config):
        autograding_utils.log_message(
            AUTOGRADING_LOG_PATH,
            job_id,
            message=f"ERROR: the form json file does not exist {form_json_config}"
        )
        raise RuntimeError(f"ERROR: the form json file does not exist {form_json_config}")

    if not os.path.exists(complete_config):
        autograding_utils.log_message(
            AUTOGRADING_LOG_PATH,
            job_id,
            message=f"ERROR: the complete config file does not exist {complete_config}"
        )
        raise RuntimeError(f"ERROR: the complete config file does not exist {complete_config}")

    # --------------------------------------------------------------------
    # MAKE TEMPORARY DIRECTORY & COPY THE NECESSARY FILES THERE
    tmp = tempfile.mkdtemp()
    tmp_autograding = os.path.join(tmp, "TMP_AUTOGRADING")
    os.mkdir(tmp_autograding)
    tmp_submission = os.path.join(tmp, "TMP_SUBMISSION")
    os.mkdir(tmp_submission)

    copytree_if_exists(provided_code_path, os.path.join(tmp_autograding, "provided_code"))
    copytree_if_exists(test_input_path, os.path.join(tmp_autograding, "test_input"))
    copytree_if_exists(test_output_path, os.path.join(tmp_autograding, "test_output"))
    copytree_if_exists(generated_output_path, os.path.join(tmp_autograding, "generated_output"))
    copytree_if_exists(bin_path, os.path.join(tmp_autograding, "bin"))
    copytree_if_exists(
        instructor_solution_path,
        os.path.join(tmp_autograding, "instructor_solution")
    )
    copytree_if_exists(
        custom_validation_code_path,
        os.path.join(tmp_autograding, "custom_validation_code")
    )

    # Copy the default submitty_router into bin.
    router_path = os.path.join(
        SUBMITTY_INSTALL_DIR,
        'src',
        'grading',
        'python',
        'submitty_router.py'
    )
    shutil.copy(router_path, os.path.join(tmp_autograding, "bin"))
    shutil.copy(form_json_config, os.path.join(tmp_autograding, "form.json"))
    shutil.copy(complete_config, os.path.join(tmp_autograding, "complete_config.json"))

    if "generate_output" not in obj:
        checkout_path = os.path.join(course_dir, "checkout", partial_path)
        results_path = os.path.join(course_dir, "results", partial_path)
    elif obj["generate_output"]:
        results_path = os.path.join(course_dir, "generated_output", obj["gradeable"])

    # grab a copy of the current history.json file (if it exists)
    history_file = os.path.join(results_path, "history.json")
    if os.path.isfile(history_file):
        shutil.copy(history_file, os.path.join(tmp_submission, "history.json"))

    # switch to tmp directory
    os.chdir(tmp)

    # make the logs directory
    tmp_logs = os.path.join(tmp, "TMP_SUBMISSION", "tmp_logs")
    os.makedirs(tmp_logs)
    # 'touch' a file in the logs folder
    open(os.path.join(tmp_logs, "overall.txt"), 'a').close()

    # --------------------------------------------------------------------
    # CONFIRM WE HAVE A CHECKOUT OF THE STUDENT'S REPO
    if "generate_output" not in obj:
        if is_vcs:
            # there should be a checkout log file in the results directory
            # move that file to the tmp logs directory.
            vcs_checkout_logfile = os.path.join(results_path, "logs", "vcs_checkout.txt")
            if os.path.isfile(vcs_checkout_logfile):
                shutil.move(vcs_checkout_logfile, tmp_logs)
            else:
                autograding_utils.log_message(
                    AUTOGRADING_LOG_PATH,
                    job_id,
                    message=f"ERROR: missing vcs_checkout.txt logfile {str(vcs_checkout_logfile)}"
                )

    if "generate_output" not in obj:
        copytree_if_exists(submission_path, os.path.join(tmp_submission, "submission"))
        copytree_if_exists(checkout_path, os.path.join(tmp_submission, "checkout"))
    obj["queue_time"] = dateutils.write_submitty_date(queue_time)
    obj["regrade"] = is_batch_job
    obj["waittime"] = waittime
    obj["job_id"] = job_id

    with open(os.path.join(tmp_submission, "queue_file.json"), 'w') as outfile:
        json.dump(obj, outfile, sort_keys=True, indent=4, separators=(',', ': '))

    user_assignment_access_json = os.path.join(
        SUBMITTY_DATA_DIR, "courses", obj["semester"], obj["course"],
        "submissions", obj["gradeable"], obj["who"], "user_assignment_access.json")
    user_assignment_settings_json = os.path.join(
        SUBMITTY_DATA_DIR, "courses", obj["semester"], obj["course"],
        "submissions", obj["gradeable"], obj["who"], "user_assignment_settings.json")

    if os.path.exists(user_assignment_access_json):
        shutil.copy(
            user_assignment_access_json,
            os.path.join(tmp_submission, "user_assignment_access.json")
        )
    if os.path.exists(user_assignment_settings_json):
        shutil.copy(
            user_assignment_settings_json,
            os.path.join(tmp_submission, "user_assignment_settings.json")
        )

    grading_began_longstring = dateutils.write_submitty_date(grading_began)
    with open(os.path.join(tmp_submission, ".grading_began"), 'w') as f:
        print(grading_began_longstring, file=f)

    # zip up autograding & submission folders
    filehandle1, my_autograding_zip_file = tempfile.mkstemp()
    filehandle2, my_submission_zip_file = tempfile.mkstemp()
    autograding_utils.zip_my_directory(tmp_autograding, my_autograding_zip_file)
    autograding_utils.zip_my_directory(tmp_submission, my_submission_zip_file)
    os.close(filehandle1)
    os.close(filehandle2)
    # cleanup
    shutil.rmtree(tmp_autograding)
    shutil.rmtree(tmp_submission)
    shutil.rmtree(tmp)

    return (my_autograding_zip_file, my_submission_zip_file)
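autograding_utils.log_message (and the older grade_items_logging.log_message) appear throughout these examples but are defined elsewhere. The signature below is inferred from the positional call sites and is an assumption, as is the import path for dateutils; a plausible sketch that appends one line to a dated log file:

import os
from submitty_utils import dateutils  # import path assumed

def log_message(log_path, job_id="", is_batch="", which_untrusted="",
                jobname="", timelabel="", elapsed_time="", message=""):
    # Append a single pipe-separated line to today's autograding log.
    now = dateutils.get_current_time()
    log_file = os.path.join(log_path, now.strftime("%Y%m%d") + ".txt")
    easy_to_read_date = dateutils.write_submitty_date(now)
    batch_string = "BATCH" if is_batch else ""
    elapsed = "" if elapsed_time == "" else "{:9.3f}".format(elapsed_time)
    parts = [easy_to_read_date, str(job_id), batch_string,
             which_untrusted, jobname,
             (timelabel + " " + elapsed).strip(), message]
    with open(log_file, 'a') as myfile:
        print(" | ".join(parts), file=myfile)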
Example no. 31
def get_job(my_name,which_machine,my_capabilities,which_untrusted,overall_lock):
    """
    Picks a job from the queue

    :param overall_lock: a lock on the directory containing all queue files
    """

    time_get_job_begin = dateutils.get_current_time()

    overall_lock.acquire()
    folder= INTERACTIVE_QUEUE

    # Grab all the files currently in the folder, sorted by creation
    # time, and put them in the queue to be graded
    files = glob.glob(os.path.join(folder, "*"))
    files_and_times = list()
    for f in files:
        try:
            my_time = os.path.getctime(f)
        except:
            continue
        tup = (f, my_time)
        files_and_times.append(tup)

    files_and_times = sorted(files_and_times, key=operator.itemgetter(1))
    my_job=""

    for full_path_file, file_time in files_and_times:
        # get the file name (without the path)
        just_file = full_path_file[len(folder)+1:]
        # skip items that are already being graded
        if (just_file[0:8]=="GRADING_"):
            continue
        grading_file = os.path.join(folder,"GRADING_"+just_file)
        if grading_file in files:
            continue

        # found something to do
        try:
            with open(full_path_file, 'r') as infile:
                queue_obj = json.load(infile)
        except:
            continue

        #Check to make sure that we are capable of grading this submission
        required_capabilities = queue_obj["required_capabilities"]
        if not required_capabilities in my_capabilities:
            continue

        # prioritize interactive jobs over (batch) regrades
        # if you've found an interactive job, exit early (since they are sorted by timestamp)
        if not "regrade" in queue_obj or not queue_obj["regrade"]:
            my_job = just_file
            break

        # otherwise it's a regrade, and if we don't already have a
        # job, take it, but we have to search the rest of the list
        if my_job == "":
            my_job = just_file

    if not my_job == "":
        grading_file = os.path.join(folder, "GRADING_" + my_job)
        # create the grading file
        with open(os.path.join(grading_file), "w") as queue_file:
            json.dump({"untrusted": which_untrusted}, queue_file)

    overall_lock.release()

    time_get_job_end = dateutils.get_current_time()

    time_delta = time_get_job_end - time_get_job_begin
    if time_delta > datetime.timedelta(milliseconds=100):
        print(my_name, "WARNING: submitty_autograding shipper get_job time", time_delta)
        grade_items_logging.log_message(JOB_ID, message=str(my_name) + " WARNING: submitty_autograding shipper get_job time " + str(time_delta))

    return my_job
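
get_job claims a job by creating a GRADING_<name> sentinel file while holding overall_lock, so concurrent shippers scanning the same directory cannot grab the same submission. A self-contained sketch of that claim protocol (the directory, lock, and file names below are illustrative, not Submitty's real queue layout):

import json
import os
import tempfile
from multiprocessing import Lock

# stand-in queue directory, lock, and job name (illustrative only)
queue_dir = tempfile.mkdtemp()
overall_lock = Lock()
job = "hw01__alice__1"

# enqueue a job the way the shipper expects to find it
with open(os.path.join(queue_dir, job), "w") as f:
    json.dump({"required_capabilities": "default", "regrade": False}, f)

# claim it: while holding the lock, drop the GRADING_ sentinel so that
# no other worker picks up the same queue file
with overall_lock:
    with open(os.path.join(queue_dir, "GRADING_" + job), "w") as f:
        json.dump({"untrusted": "untrusted00"}, f)
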
Example n. 32
def archive_autograding_results(config, working_directory: os.PathLike,
                                job_id: str, which_untrusted: str,
                                is_batch_job: bool, complete_config_obj: dict,
                                gradeable_config_obj: dict, queue_obj: dict,
                                is_test_environment: bool):
    """ After grading is finished, archive the results. """

    tmp_autograding = os.path.join(working_directory, "TMP_AUTOGRADING")
    tmp_submission = os.path.join(working_directory, "TMP_SUBMISSION")
    tmp_work = os.path.join(working_directory, "TMP_WORK")
    tmp_logs = os.path.join(working_directory, "TMP_SUBMISSION", "tmp_logs")
    tmp_results = os.path.join(working_directory, "TMP_RESULTS")
    submission_path = os.path.join(tmp_submission, "submission")
    random_output_path = os.path.join(tmp_work, 'random_output')

    if "generate_output" not in queue_obj:
        partial_path = os.path.join(queue_obj["gradeable"], queue_obj["who"],
                                    str(queue_obj["version"]))
        item_name = os.path.join(queue_obj["semester"], queue_obj["course"],
                                 "submissions", partial_path)
    elif queue_obj["generate_output"]:
        item_name = os.path.join(queue_obj["semester"], queue_obj["course"],
                                 "generated_output", queue_obj["gradeable"])
    results_public_dir = os.path.join(tmp_results, "results_public")
    results_details_dir = os.path.join(tmp_results, "details")
    patterns = complete_config_obj['autograding']

    # Copy work to details
    pattern_copy("work_to_details", patterns['work_to_details'], tmp_work,
                 results_details_dir, tmp_logs)

    # Copy work to public
    if 'work_to_public' in patterns:
        pattern_copy("work_to_public", patterns['work_to_public'], tmp_work,
                     results_public_dir, tmp_logs)

    if os.path.exists(random_output_path):
        pattern_copy("work_to_random_output", [
            os.path.join(random_output_path, '**', '*.txt'),
        ], tmp_work, tmp_results, tmp_logs)
    # timestamp of first access to the gradeable page
    first_access_string = ""
    # grab the submission time
    if "generate_output" in queue_obj and queue_obj["generate_output"]:
        submission_string = ""
    else:
        with open(
                os.path.join(tmp_submission, 'submission',
                             ".submit.timestamp"),
                'r') as submission_time_file:
            submission_string = submission_time_file.read().rstrip()
        # grab the first access to the gradeable page (if it exists)
        user_assignment_access_filename = os.path.join(
            tmp_submission, ".user_assignment_access.json")
        if os.path.exists(user_assignment_access_filename):
            with open(user_assignment_access_filename, 'r') as access_file:
                obj = json.load(access_file)
                first_access_string = obj[0]["timestamp"]

    history_file_tmp = os.path.join(tmp_submission, "history.json")
    history_file = os.path.join(tmp_results, "history.json")
    if os.path.isfile(history_file_tmp) and not is_test_environment:
        shutil.move(history_file_tmp, history_file)
        # fix permissions
        ta_group_id = os.stat(tmp_results).st_gid
        os.chown(history_file, int(config.submitty_users['daemon_uid']),
                 ta_group_id)
        add_permissions(history_file, stat.S_IRGRP)
    grading_finished = dateutils.get_current_time()
    grade_result = ""
    if "generate_output" not in queue_obj:
        try:
            shutil.copy(os.path.join(tmp_work, "grade.txt"), tmp_results)
            with open(os.path.join(tmp_work, "grade.txt")) as f:
                lines = f.readlines()
                for line in lines:
                    line = line.rstrip('\n')
                    if line.startswith("Automatic grading total:"):
                        grade_result = line
        except Exception:
            with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
                f.write(
                    f"\n\nERROR: Grading incomplete -- Could not process {os.path.join(tmp_work,'grade.txt')}"
                )
            config.logger.log_message(
                "ERROR: could not process grade.txt. See stack trace entry for more details.",
                job_id=job_id,
                is_batch=is_batch_job,
                which_untrusted=which_untrusted,
                jobname=item_name,
            )
            config.logger.log_stack_trace(
                traceback.format_exc(),
                job_id=job_id,
                is_batch=is_batch_job,
                which_untrusted=which_untrusted,
                jobname=item_name,
            )

        submission_datetime = dateutils.read_submitty_date(submission_string)
        submission_longstring = dateutils.write_submitty_date(
            submission_datetime)

        # compute lateness (if there is a due date / submission deadline)
        gradeable_deadline_string = gradeable_config_obj["date_due"]
        if gradeable_deadline_string is None:
            print("NO DEADLINE")
            gradeable_deadline_longstring = "None"
            seconds_late = 0
        else:
            print("DEADLINE IS '" + str(gradeable_deadline_string) + "'")
            gradeable_deadline_datetime = dateutils.read_submitty_date(
                gradeable_deadline_string)
            gradeable_deadline_longstring = dateutils.write_submitty_date(
                gradeable_deadline_datetime)
            seconds_late = int((submission_datetime -
                                gradeable_deadline_datetime).total_seconds())

        # compute the access duration in seconds (if it exists)
        access_duration = -1
        if first_access_string != "":
            first_access_datetime = dateutils.read_submitty_date(
                first_access_string)
            access_duration = int(
                (submission_datetime - first_access_datetime).total_seconds())

        # note: negative = not late
        grading_finished_longstring = dateutils.write_submitty_date(
            grading_finished)

        with open(os.path.join(tmp_submission, ".grading_began"), 'r') as f:
            grading_began_longstring = f.read()
        grading_began = dateutils.read_submitty_date(grading_began_longstring)

        gradingtime = (grading_finished - grading_began).total_seconds()

        queue_obj["gradingtime"] = gradingtime
        queue_obj["grade_result"] = grade_result
        queue_obj["which_untrusted"] = which_untrusted
        waittime = queue_obj["waittime"]

        try:

            # Make certain results.json is utf-8 encoded.
            results_json_path = os.path.join(tmp_work, 'results.json')
            with codecs.open(results_json_path,
                             'r',
                             encoding='utf-8',
                             errors='ignore') as infile:
                results_str = "".join(line.rstrip() for line in infile)
                results_obj = json.loads(results_str)
            with open(results_json_path, 'w') as outfile:
                json.dump(results_obj, outfile, indent=4)

            shutil.move(results_json_path,
                        os.path.join(tmp_results, "results.json"))
        except Exception:
            with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
                f.write(
                    f"\n\nERROR: Grading incomplete -- Could not open/write {os.path.join(tmp_work,'results.json')}"
                )
            config.logger.log_message(
                "ERROR: results.json read/write error",
                job_id=job_id,
                is_batch=is_batch_job,
                which_untrusted=which_untrusted,
                jobname=item_name,
            )
            config.logger.log_stack_trace(
                traceback.format_exc(),
                job_id=job_id,
                is_batch=is_batch_job,
                which_untrusted=which_untrusted,
                jobname=item_name,
            )

        # Rescue custom validator files
        custom_validator_output_directory = os.path.join(
            tmp_results, "custom_validator_output")
        pattern_copy("rescue_custom_validator_validation_jsons", [
            os.path.join(tmp_work, 'validation_results_*.json'),
        ], tmp_work, custom_validator_output_directory, tmp_logs)
        pattern_copy("rescue_custom_validator_logs", [
            os.path.join(tmp_work, 'validation_logfile_*.txt'),
        ], tmp_work, custom_validator_output_directory, tmp_logs)
        pattern_copy("rescue_custom_validator_errors", [
            os.path.join(tmp_work, 'validation_stderr_*.txt'),
        ], tmp_work, custom_validator_output_directory, tmp_logs)

        just_write_grade_history(history_file, gradeable_deadline_longstring,
                                 submission_longstring, seconds_late,
                                 first_access_string, access_duration,
                                 queue_obj["queue_time"],
                                 "BATCH" if is_batch_job else "INTERACTIVE",
                                 grading_began_longstring,
                                 int(waittime), grading_finished_longstring,
                                 int(gradingtime), grade_result,
                                 queue_obj.get("revision", None))

        with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
            f.write("FINISHED GRADING!\n")

        config.logger.log_message(grade_result,
                                  job_id=job_id,
                                  is_batch=is_batch_job,
                                  which_untrusted=which_untrusted,
                                  jobname=item_name,
                                  timelabel="grade:",
                                  elapsed_time=gradingtime)

    with open(os.path.join(tmp_results, "queue_file.json"), 'w') as outfile:
        json.dump(queue_obj,
                  outfile,
                  sort_keys=True,
                  indent=4,
                  separators=(',', ': '))

    # save the logs!
    shutil.copytree(tmp_logs, os.path.join(tmp_results, "logs"))

    # Save the .submit.notebook
    # Copy the .submit.notebook to tmp_work for validation
    submit_notebook_path = os.path.join(tmp_submission, 'submission',
                                        ".submit.notebook")
    if os.path.exists(submit_notebook_path):
        shutil.copy(submit_notebook_path,
                    os.path.join(tmp_results, ".submit.notebook"))
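
The lateness and access-duration bookkeeping above is plain timezone-aware datetime arithmetic: parse the strings, subtract, truncate to whole seconds. A small worked sketch (assuming dateutils.read_submitty_date yields aware datetimes like these):

from datetime import datetime, timezone

# illustrative timestamps, not real Submitty data
deadline = datetime(2021, 3, 1, 23, 59, 59, tzinfo=timezone.utc)
submitted = datetime(2021, 3, 2, 0, 14, 30, tzinfo=timezone.utc)
first_access = datetime(2021, 2, 28, 9, 0, 0, tzinfo=timezone.utc)

seconds_late = int((submitted - deadline).total_seconds())         # 871 (negative = not late)
access_duration = int((submitted - first_access).total_seconds())  # 141270
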
Example n. 33
def archive_autograding_results(working_directory, job_id, which_untrusted, is_batch_job, complete_config_obj, 
                                gradeable_config_obj, queue_obj, log_path, stack_trace_log_path, is_test_environment):
    """ After grading is finished, archive the results. """

    tmp_autograding = os.path.join(working_directory,"TMP_AUTOGRADING")
    tmp_submission = os.path.join(working_directory,"TMP_SUBMISSION")
    tmp_work = os.path.join(working_directory,"TMP_WORK")
    tmp_logs = os.path.join(working_directory,"TMP_SUBMISSION","tmp_logs")
    tmp_results = os.path.join(working_directory,"TMP_RESULTS")
    submission_path = os.path.join(tmp_submission, "submission")
    random_output_path = os.path.join(tmp_work, 'random_output')

    if "generate_output" not in queue_obj:
        partial_path = os.path.join(queue_obj["gradeable"],queue_obj["who"],str(queue_obj["version"]))
        item_name = os.path.join(queue_obj["semester"],queue_obj["course"],"submissions",partial_path)
    elif queue_obj["generate_output"]:
        item_name = os.path.join(queue_obj["semester"],queue_obj["course"],"generated_output",queue_obj["gradeable"])
    results_public_dir = os.path.join(tmp_results,"results_public")
    results_details_dir = os.path.join(tmp_results, "details")
    patterns = complete_config_obj['autograding']

    # Copy work to details
    pattern_copy("work_to_details", patterns['work_to_details'], tmp_work, results_details_dir, tmp_logs)
    
    # Copy work to public
    if 'work_to_public' in patterns:
        pattern_copy("work_to_public", patterns['work_to_public'], tmp_work, results_public_dir, tmp_logs)

    if os.path.exists(random_output_path):
        pattern_copy("work_to_random_output", [os.path.join(random_output_path, 'test*', '**', '*.txt'),], tmp_work, tmp_results, tmp_logs)
    # timestamp of first access to the gradeable page
    first_access_string = ""
    # grab the submission time
    if "generate_output" in queue_obj and queue_obj["generate_output"]:
        submission_string = ""
    else:
        with open(os.path.join(tmp_submission, 'submission' ,".submit.timestamp"), 'r') as submission_time_file:
            submission_string = submission_time_file.read().rstrip()
        # grab the first access to the gradeable page (if it exists)
        user_assignment_access_filename = os.path.join(tmp_submission, "user_assignment_access.json")
        if os.path.exists(user_assignment_access_filename):
            with open(user_assignment_access_filename, 'r') as access_file:
                obj = json.load(access_file, object_pairs_hook=collections.OrderedDict)
                first_access_string = obj["page_load_history"][0]["time"]

    history_file_tmp = os.path.join(tmp_submission,"history.json")
    history_file = os.path.join(tmp_results,"history.json")
    if os.path.isfile(history_file_tmp) and not is_test_environment:

        from . import CONFIG_PATH
        with open(os.path.join(CONFIG_PATH, 'submitty_users.json')) as open_file:
            OPEN_JSON = json.load(open_file)
        DAEMON_UID = OPEN_JSON['daemon_uid']

        shutil.move(history_file_tmp, history_file)
        # fix permissions
        ta_group_id = os.stat(tmp_results).st_gid
        os.chown(history_file, int(DAEMON_UID),ta_group_id)
        add_permissions(history_file, stat.S_IRGRP)
    grading_finished = dateutils.get_current_time()

    if "generate_output" not in queue_obj:
        try:
            shutil.copy(os.path.join(tmp_work, "grade.txt"), tmp_results)
        except Exception:
            with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
                print("\n\nERROR: Grading incomplete -- Could not copy", os.path.join(tmp_work, "grade.txt"), file=f)
            log_message(log_path, job_id, is_batch_job, which_untrusted, item_name, message="ERROR: grade.txt does not exist")
            log_stack_trace(stack_trace_log_path, job_id, is_batch_job, which_untrusted, item_name, trace=traceback.format_exc())

        grade_result = ""
        try:
            with open(os.path.join(tmp_work,"grade.txt")) as f:
                lines = f.readlines()
                for line in lines:
                    line = line.rstrip('\n')
                    if line.startswith("Automatic grading total:"):
                        grade_result = line
        except Exception:
            with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
                print("\n\nERROR: Grading incomplete -- Could not open", os.path.join(tmp_work, "grade.txt"), file=f)
            log_message(log_path, job_id, is_batch_job, which_untrusted, item_name, message="ERROR: grade.txt does not exist")
            log_stack_trace(stack_trace_log_path, job_id, is_batch_job, which_untrusted, item_name, trace=traceback.format_exc())


        gradeable_deadline_string = gradeable_config_obj["date_due"]

        # FIXME: The access date string is currently misformatted
        #    mm-dd-yyyy, but we want yyyy-mm-dd.  Also it is missing
        #    the common name timezone string, e.g., "America/NewYork".
        #    We should standardize this logging eventually, but
        #    keeping it as is because we are mid-semester with this
        #    new feature and I don't want to break things.
        first_access_string = dateutils.normalize_submitty_date(first_access_string)
        
        submission_datetime = dateutils.read_submitty_date(submission_string)
        gradeable_deadline_datetime = dateutils.read_submitty_date(gradeable_deadline_string)
        gradeable_deadline_longstring = dateutils.write_submitty_date(gradeable_deadline_datetime)
        submission_longstring = dateutils.write_submitty_date(submission_datetime)
        seconds_late = int((submission_datetime-gradeable_deadline_datetime).total_seconds())
        # compute the access duration in seconds (if it exists)
        access_duration = -1
        if first_access_string != "":
            first_access_datetime = dateutils.read_submitty_date(first_access_string)
            access_duration = int((submission_datetime-first_access_datetime).total_seconds())

        # note: negative = not late
        grading_finished_longstring = dateutils.write_submitty_date(grading_finished)

        with open(os.path.join(tmp_submission,".grading_began"), 'r') as f:
            grading_began_longstring = f.read()
        grading_began = dateutils.read_submitty_date(grading_began_longstring)

        gradingtime = (grading_finished - grading_began).total_seconds()

        queue_obj["gradingtime"]=gradingtime
        queue_obj["grade_result"]=grade_result
        queue_obj["which_untrusted"]=which_untrusted
        waittime = queue_obj["waittime"]

        try:

            # Make certain results.json is utf-8 encoded.
            results_json_path = os.path.join(tmp_work, 'results.json')
            with codecs.open(results_json_path, 'r', encoding='utf-8', errors='ignore') as infile:
                results_str = "".join(line.rstrip() for line in infile)
                results_obj = json.loads(results_str)
            with open(results_json_path, 'w') as outfile:
                json.dump(results_obj, outfile, indent=4)

            shutil.move(results_json_path, os.path.join(tmp_results, "results.json"))
        except Exception:
            with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
                print("\n\nERROR: Grading incomplete -- Could not open/write", os.path.join(tmp_work, "results.json"), file=f)
            log_message(log_path, job_id, is_batch_job, which_untrusted, item_name, message="ERROR: results.json read/write error")
            log_stack_trace(stack_trace_log_path, job_id, is_batch_job, which_untrusted, item_name, trace=traceback.format_exc())

        # Rescue custom validator files
        custom_validator_output_directory = os.path.join(tmp_results, "custom_validator_output")
        pattern_copy("rescue_custom_validator_validation_jsons", [os.path.join(tmp_work, 'validation_results_*.json'),], tmp_work, custom_validator_output_directory, tmp_logs)
        pattern_copy("rescue_custom_validator_logs", [os.path.join(tmp_work, 'validation_logfile_*.txt'),], tmp_work, custom_validator_output_directory, tmp_logs)
        pattern_copy("rescue_custom_validator_errors", [os.path.join(tmp_work, 'validation_stderr_*.txt'),], tmp_work, custom_validator_output_directory, tmp_logs)

        just_write_grade_history(history_file,
                                gradeable_deadline_longstring,
                                submission_longstring,
                                seconds_late,
                                first_access_string,
                                access_duration,
                                queue_obj["queue_time"],
                                "BATCH" if is_batch_job else "INTERACTIVE",
                                grading_began_longstring,
                                int(waittime),
                                grading_finished_longstring,
                                int(gradingtime),
                                grade_result,
                                queue_obj.get("revision", None))

        with open(os.path.join(tmp_logs,"overall.txt"),'a') as f:
            f.write("FINISHED GRADING!\n")
        
        log_message(log_path, job_id,is_batch_job,which_untrusted,item_name,"grade:",gradingtime,grade_result)

    with open(os.path.join(tmp_results,"queue_file.json"),'w') as outfile:
        json.dump(queue_obj,outfile,sort_keys=True,indent=4,separators=(',', ': '))

    # save the logs!
    shutil.copytree(tmp_logs,os.path.join(tmp_results,"logs"))
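
Both versions of archive_autograding_results use the same sanitize-and-rewrite trick for results.json: decode with errors='ignore' so invalid UTF-8 bytes are silently dropped, parse, then re-serialize as clean JSON. A standalone sketch of that pattern:

import codecs
import json

def sanitize_json_file(path):
    # drop any bytes that are not valid UTF-8, then rewrite the file as
    # pretty-printed JSON so downstream readers never see bad bytes
    with codecs.open(path, 'r', encoding='utf-8', errors='ignore') as infile:
        obj = json.loads("".join(line.rstrip() for line in infile))
    with open(path, 'w') as outfile:
        json.dump(obj, outfile, indent=4)
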
Example n. 34
def prepare_autograding_and_submission_zip(which_machine,which_untrusted,next_directory,next_to_grade):
    os.chdir(SUBMITTY_DATA_DIR)

    # generate a random id to be used to track this job in the autograding logs
    job_id = ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(6))

    # --------------------------------------------------------
    # figure out what we're supposed to grade & error checking
    obj = load_queue_file_obj(job_id,next_directory,next_to_grade)

    partial_path = os.path.join(obj["gradeable"],obj["who"],str(obj["version"]))
    item_name = os.path.join(obj["semester"],obj["course"],"submissions",partial_path)
    submission_path = os.path.join(SUBMITTY_DATA_DIR,"courses",item_name)
    if not os.path.isdir(submission_path):
        grade_items_logging.log_message(job_id, message="ERROR: the submission directory does not exist " + submission_path)
        raise RuntimeError("ERROR: the submission directory does not exist", submission_path)
    print(which_machine,which_untrusted,"prepare zip",submission_path)
    is_vcs,vcs_type,vcs_base_url,vcs_subdirectory = get_vcs_info(SUBMITTY_DATA_DIR,obj["semester"],obj["course"],obj["gradeable"],obj["who"],obj["team"])

    is_batch_job = "regrade" in obj and obj["regrade"]
    is_batch_job_string = "BATCH" if is_batch_job else "INTERACTIVE"

    queue_time = get_queue_time(next_directory,next_to_grade)
    queue_time_longstring = dateutils.write_submitty_date(queue_time)
    grading_began = dateutils.get_current_time()
    waittime = (grading_began-queue_time).total_seconds()
    grade_items_logging.log_message(job_id,is_batch_job,"zip",item_name,"wait:",waittime,"")

    # --------------------------------------------------------
    # various paths
    provided_code_path = os.path.join(SUBMITTY_DATA_DIR,"courses",obj["semester"],obj["course"],"provided_code",obj["gradeable"])
    test_input_path = os.path.join(SUBMITTY_DATA_DIR,"courses",obj["semester"],obj["course"],"test_input",obj["gradeable"])
    test_output_path = os.path.join(SUBMITTY_DATA_DIR,"courses",obj["semester"],obj["course"],"test_output",obj["gradeable"])
    custom_validation_code_path = os.path.join(SUBMITTY_DATA_DIR,"courses",obj["semester"],obj["course"],"custom_validation_code",obj["gradeable"])
    bin_path = os.path.join(SUBMITTY_DATA_DIR,"courses",obj["semester"],obj["course"],"bin",obj["gradeable"])
    form_json_config = os.path.join(SUBMITTY_DATA_DIR,"courses",obj["semester"],obj["course"],"config","form","form_"+obj["gradeable"]+".json")
    complete_config = os.path.join(SUBMITTY_DATA_DIR,"courses",obj["semester"],obj["course"],"config","complete_config","complete_config_"+obj["gradeable"]+".json")

    if not os.path.exists(form_json_config):
        grade_items_logging.log_message(job_id,message="ERROR: the form json file does not exist " + form_json_config)
        raise RuntimeError("ERROR: the form json file does not exist ",form_json_config)
    if not os.path.exists(complete_config):
        grade_items_logging.log_message(job_id,message="ERROR: the complete config file does not exist " + complete_config)
        raise RuntimeError("ERROR: the complete config file does not exist ",complete_config)

    # --------------------------------------------------------------------
    # MAKE TEMPORARY DIRECTORY & COPY THE NECESSARY FILES THERE
    tmp = tempfile.mkdtemp()
    tmp_autograding = os.path.join(tmp,"TMP_AUTOGRADING")
    os.mkdir(tmp_autograding)
    tmp_submission = os.path.join(tmp,"TMP_SUBMISSION")
    os.mkdir(tmp_submission)

    copytree_if_exists(provided_code_path,os.path.join(tmp_autograding,"provided_code"))
    copytree_if_exists(test_input_path,os.path.join(tmp_autograding,"test_input"))
    copytree_if_exists(test_output_path,os.path.join(tmp_autograding,"test_output"))
    copytree_if_exists(custom_validation_code_path,os.path.join(tmp_autograding,"custom_validation_code"))
    copytree_if_exists(bin_path,os.path.join(tmp_autograding,"bin"))
    shutil.copy(form_json_config,os.path.join(tmp_autograding,"form.json"))
    shutil.copy(complete_config,os.path.join(tmp_autograding,"complete_config.json"))

    checkout_path = os.path.join(SUBMITTY_DATA_DIR,"courses",obj["semester"],obj["course"],"checkout",partial_path)
    results_path = os.path.join(SUBMITTY_DATA_DIR,"courses",obj["semester"],obj["course"],"results",partial_path)

    # grab a copy of the current history.json file (if it exists)
    history_file = os.path.join(results_path,"history.json")
    history_file_tmp = ""
    if os.path.isfile(history_file):
        shutil.copy(history_file,os.path.join(tmp_submission,"history.json"))
    # get info from the gradeable config file
    with open(complete_config, 'r') as infile:
        complete_config_obj = json.load(infile)

    checkout_subdirectory = complete_config_obj["autograding"].get("use_checkout_subdirectory","")
    checkout_subdir_path = os.path.join(checkout_path,checkout_subdirectory)
    queue_file = os.path.join(next_directory,next_to_grade)

    # switch to tmp directory
    os.chdir(tmp)

    # make the logs directory
    tmp_logs = os.path.join(tmp,"TMP_SUBMISSION","tmp_logs")
    os.makedirs(tmp_logs)
    # 'touch' a file in the logs folder
    open(os.path.join(tmp_logs,"overall.txt"), 'a').close()

    # --------------------------------------------------------------------
    # CONFIRM WE HAVE A CHECKOUT OF THE STUDENT'S REPO
    if is_vcs:
        # there should be a checkout log file in the results directory
        # move that file to the tmp logs directory..
        vcs_checkout_logfile = os.path.join(results_path,"logs","vcs_checkout.txt")
        if os.path.isfile(vcs_checkout_logfile):
            shutil.move(vcs_checkout_logfile,tmp_logs)
        else:
            grade_items_logging.log_message(job_id, message="ERROR: missing vcs_checkout.txt logfile " + str(vcs_checkout_logfile))


    copytree_if_exists(submission_path,os.path.join(tmp_submission,"submission"))
    copytree_if_exists(checkout_path,os.path.join(tmp_submission,"checkout"))
    obj["queue_time"] = queue_time_longstring
    obj["regrade"] = is_batch_job
    obj["waittime"] = waittime
    obj["job_id"] = job_id

    with open(os.path.join(tmp_submission,"queue_file.json"),'w') as outfile:
        json.dump(obj,outfile,sort_keys=True,indent=4,separators=(',', ': '))

    grading_began_longstring = dateutils.write_submitty_date(grading_began)
    with open(os.path.join(tmp_submission,".grading_began"), 'w') as f:
        print(grading_began_longstring, file=f)

    # zip up autograding & submission folders
    filehandle1, my_autograding_zip_file = tempfile.mkstemp()
    filehandle2, my_submission_zip_file = tempfile.mkstemp()
    grade_item.zip_my_directory(tmp_autograding,my_autograding_zip_file)
    grade_item.zip_my_directory(tmp_submission,my_submission_zip_file)
    os.close(filehandle1)
    os.close(filehandle2)
    # cleanup
    shutil.rmtree(tmp_autograding)
    shutil.rmtree(tmp_submission)
    shutil.rmtree(tmp)

    #grade_items_logging.log_message(job_id,is_batch_job,"done zip",item_name)

    return (my_autograding_zip_file,my_submission_zip_file)
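
prepare_autograding_and_submission_zip leans on copytree_if_exists so that missing course directories (e.g. a gradeable with no provided_code) do not abort the zip. A plausible sketch of such a helper (the fallback of creating an empty target is an assumption about the real helper's behavior):

import os
import shutil

def copytree_if_exists_sketch(source, target):
    # if the source directory exists, copy it wholesale; otherwise make
    # an empty target so later steps still find the expected path
    if os.path.isdir(source):
        shutil.copytree(source, target)
    else:
        os.makedirs(target)
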
Example n. 35
def grade_from_zip(my_autograding_zip_file, my_submission_zip_file,
                   which_untrusted):
    os.chdir(SUBMITTY_DATA_DIR)
    tmp = os.path.join("/var/local/submitty/autograding_tmp/", which_untrusted,
                       "tmp")

    # clean up old usage of this directory
    shutil.rmtree(tmp, ignore_errors=True)
    os.makedirs(tmp)

    which_machine = socket.gethostname()

    # unzip autograding and submission folders
    tmp_autograding = os.path.join(tmp, "TMP_AUTOGRADING")
    tmp_submission = os.path.join(tmp, "TMP_SUBMISSION")
    unzip_this_file(my_autograding_zip_file, tmp_autograding)
    unzip_this_file(my_submission_zip_file, tmp_submission)
    os.remove(my_autograding_zip_file)
    os.remove(my_submission_zip_file)

    tmp_logs = os.path.join(tmp, "TMP_SUBMISSION", "tmp_logs")

    queue_file = os.path.join(tmp_submission, "queue_file.json")
    with open(queue_file, 'r') as infile:
        queue_obj = json.load(infile)

    queue_time_longstring = queue_obj["queue_time"]
    waittime = queue_obj["waittime"]
    is_batch_job = queue_obj["regrade"]
    job_id = queue_obj["job_id"]
    is_batch_job_string = "BATCH" if is_batch_job else "INTERACTIVE"

    partial_path = os.path.join(queue_obj["gradeable"], queue_obj["who"],
                                str(queue_obj["version"]))
    item_name = os.path.join(queue_obj["semester"], queue_obj["course"],
                             "submissions", partial_path)

    grade_items_logging.log_message(job_id, is_batch_job, which_untrusted,
                                    item_name, "wait:", waittime, "")

    # --------------------------------------------------------------------
    # START DOCKER

    # WIP: This option file facilitated testing...
    #USE_DOCKER = os.path.isfile("/tmp/use_docker")
    #use_docker_string="grading begins, using DOCKER" if USE_DOCKER else "grading begins (not using docker)"
    #grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,submission_path,message=use_docker_string)

    container = None
    if USE_DOCKER:
        # grading_began was recorded by the shipper in the .grading_began
        # file inside the submission zip; read it back for the timing log
        with open(os.path.join(tmp_submission, ".grading_began"), 'r') as f:
            grading_began = dateutils.read_submitty_date(f.read())
        container = subprocess.check_output([
            'docker', 'run', '-t', '-d', '-v', tmp + ':' + tmp, 'ubuntu:custom'
        ]).decode('utf8').strip()
        dockerlaunch_done = dateutils.get_current_time()
        dockerlaunch_time = (dockerlaunch_done - grading_began).total_seconds()
        grade_items_logging.log_message(job_id, is_batch_job, which_untrusted,
                                        item_name, "dcct:",
                                        dockerlaunch_time,
                                        "docker container created")

    # --------------------------------------------------------------------
    # COMPILE THE SUBMITTED CODE

    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        print("====================================\nCOMPILATION STARTS",
              file=f)

    # copy submitted files to the tmp compilation directory
    tmp_compilation = os.path.join(tmp, "TMP_COMPILATION")
    os.mkdir(tmp_compilation)
    os.chdir(tmp_compilation)

    submission_path = os.path.join(tmp_submission, "submission")
    checkout_path = os.path.join(tmp_submission, "checkout")

    provided_code_path = os.path.join(tmp_autograding, "provided_code")
    test_input_path = os.path.join(tmp_autograding, "test_input")
    test_output_path = os.path.join(tmp_autograding, "test_output")
    custom_validation_code_path = os.path.join(tmp_autograding,
                                               "custom_validation_code")
    bin_path = os.path.join(tmp_autograding, "bin")
    form_json_config = os.path.join(tmp_autograding, "form.json")
    complete_config = os.path.join(tmp_autograding, "complete_config.json")

    with open(form_json_config, 'r') as infile:
        gradeable_config_obj = json.load(infile)
    gradeable_deadline_string = gradeable_config_obj["date_due"]

    with open(complete_config, 'r') as infile:
        complete_config_obj = json.load(infile)
    patterns_submission_to_compilation = complete_config_obj["autograding"][
        "submission_to_compilation"]
    pattern_copy("submission_to_compilation",
                 patterns_submission_to_compilation, submission_path,
                 tmp_compilation, tmp_logs)

    is_vcs = gradeable_config_obj["upload_type"] == "repository"
    checkout_subdirectory = complete_config_obj["autograding"].get(
        "use_checkout_subdirectory", "")
    checkout_subdir_path = os.path.join(checkout_path, checkout_subdirectory)

    if is_vcs:
        pattern_copy("checkout_to_compilation",
                     patterns_submission_to_compilation, checkout_subdir_path,
                     tmp_compilation, tmp_logs)

    # copy any instructor provided code files to tmp compilation directory
    copy_contents_into(job_id, provided_code_path, tmp_compilation, tmp_logs)

    subprocess.call(['ls', '-lR', '.'],
                    stdout=open(tmp_logs + "/overall.txt", 'a'))

    # copy compile.out to the current directory
    shutil.copy(os.path.join(bin_path, "compile.out"),
                os.path.join(tmp_compilation, "my_compile.out"))

    # give the untrusted user read/write/execute permissions on the tmp directory & files
    add_permissions_recursive(tmp_compilation,
                              stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP,
                              stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP,
                              stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP)

    add_permissions(tmp, stat.S_IROTH | stat.S_IXOTH)
    add_permissions(tmp_logs, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)

    # grab the submission time
    with open(os.path.join(submission_path, ".submit.timestamp"),
              'r') as submission_time_file:
        submission_string = submission_time_file.read().rstrip()

    with open(os.path.join(tmp_logs, "compilation_log.txt"), 'w') as logfile:
        if USE_DOCKER:
            compile_success = subprocess.call([
                'docker', 'exec', '-w', tmp_compilation, container,
                os.path.join(tmp_compilation, 'my_compile.out'),
                queue_obj['gradeable'], queue_obj['who'],
                str(queue_obj['version']), submission_string
            ],
                                              stdout=logfile)
        else:
            compile_success = subprocess.call([
                os.path.join(SUBMITTY_INSTALL_DIR, "sbin",
                             "untrusted_execute"), which_untrusted,
                os.path.join(tmp_compilation, "my_compile.out"),
                queue_obj["gradeable"], queue_obj["who"],
                str(queue_obj["version"]), submission_string
            ],
                                              stdout=logfile)

    if compile_success == 0:
        print(which_machine, which_untrusted, "COMPILATION OK")
    else:
        print(which_machine, which_untrusted, "COMPILATION FAILURE")
        grade_items_logging.log_message(job_id,
                                        is_batch_job,
                                        which_untrusted,
                                        item_name,
                                        message="COMPILATION FAILURE")

    untrusted_grant_rwx_access(which_untrusted, tmp_compilation)

    # remove the compilation program
    os.remove(os.path.join(tmp_compilation, "my_compile.out"))

    # return to the main tmp directory
    os.chdir(tmp)

    # --------------------------------------------------------------------
    # make the runner directory

    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        print("====================================\nRUNNER STARTS", file=f)

    tmp_work = os.path.join(tmp, "TMP_WORK")
    os.makedirs(tmp_work)
    os.chdir(tmp_work)

    # move all executable files from the compilation directory to the main tmp directory
    # Note: Must preserve the directory structure of compiled files (esp for Java)

    patterns_submission_to_runner = complete_config_obj["autograding"][
        "submission_to_runner"]
    pattern_copy("submission_to_runner", patterns_submission_to_runner,
                 submission_path, tmp_work, tmp_logs)
    if is_vcs:
        pattern_copy("checkout_to_runner", patterns_submission_to_runner,
                     checkout_subdir_path, tmp_work, tmp_logs)

    patterns_compilation_to_runner = complete_config_obj["autograding"][
        "compilation_to_runner"]
    pattern_copy("compilation_to_runner", patterns_compilation_to_runner,
                 tmp_compilation, tmp_work, tmp_logs)

    # copy input files to tmp_work directory
    copy_contents_into(job_id, test_input_path, tmp_work, tmp_logs)

    subprocess.call(['ls', '-lR', '.'],
                    stdout=open(tmp_logs + "/overall.txt", 'a'))

    # copy runner.out to the current directory
    shutil.copy(os.path.join(bin_path, "run.out"),
                os.path.join(tmp_work, "my_runner.out"))

    # give the untrusted user read/write/execute permissions on the tmp directory & files
    add_permissions_recursive(tmp_work,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)

    # run the run.out as the untrusted user
    with open(os.path.join(tmp_logs, "runner_log.txt"), 'w') as logfile:
        print("LOGGING BEGIN my_runner.out", file=logfile)
        logfile.flush()

        runner_success = -1  # assume failure in case the call itself raises
        try:
            if USE_DOCKER:
                runner_success = subprocess.call([
                    'docker', 'exec', '-w', tmp_work, container,
                    os.path.join(tmp_work, 'my_runner.out'),
                    queue_obj['gradeable'], queue_obj['who'],
                    str(queue_obj['version']), submission_string
                ],
                                                 stdout=logfile)
            else:
                runner_success = subprocess.call([
                    os.path.join(SUBMITTY_INSTALL_DIR, "sbin",
                                 "untrusted_execute"), which_untrusted,
                    os.path.join(tmp_work, "my_runner.out"),
                    queue_obj["gradeable"], queue_obj["who"],
                    str(queue_obj["version"]), submission_string
                ],
                                                 stdout=logfile)
            logfile.flush()
        except Exception as e:
            print("ERROR caught runner.out exception={0}".format(str(
                e.args[0])).encode("utf-8"),
                  file=logfile)
            logfile.flush()

        print("LOGGING END my_runner.out", file=logfile)
        logfile.flush()

        killall_success = subprocess.call([
            os.path.join(SUBMITTY_INSTALL_DIR, "sbin", "untrusted_execute"),
            which_untrusted,
            os.path.join(SUBMITTY_INSTALL_DIR, "sbin", "killall.py")
        ],
                                          stdout=logfile)

        print("KILLALL COMPLETE my_runner.out", file=logfile)
        logfile.flush()

        if killall_success != 0:
            msg = 'RUNNER ERROR: had to kill {} process(es)'.format(
                killall_success)
            print("pid", os.getpid(), msg)
            grade_items_logging.log_message(job_id, is_batch_job,
                                            which_untrusted, item_name, "", "",
                                            msg)

    if runner_success == 0:
        print(which_machine, which_untrusted, "RUNNER OK")
    else:
        print(which_machine, which_untrusted, "RUNNER FAILURE")
        grade_items_logging.log_message(job_id,
                                        is_batch_job,
                                        which_untrusted,
                                        item_name,
                                        message="RUNNER FAILURE")

    untrusted_grant_rwx_access(which_untrusted, tmp_work)
    untrusted_grant_rwx_access(which_untrusted, tmp_compilation)

    # --------------------------------------------------------------------
    # RUN VALIDATOR

    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        print("====================================\nVALIDATION STARTS",
              file=f)

    # copy results files from compilation...
    patterns_submission_to_validation = complete_config_obj["autograding"][
        "submission_to_validation"]
    pattern_copy("submission_to_validation", patterns_submission_to_validation,
                 submission_path, tmp_work, tmp_logs)
    if is_vcs:
        pattern_copy("checkout_to_validation",
                     patterns_submission_to_validation, checkout_subdir_path,
                     tmp_work, tmp_logs)
    patterns_compilation_to_validation = complete_config_obj["autograding"][
        "compilation_to_validation"]
    pattern_copy("compilation_to_validation",
                 patterns_compilation_to_validation, tmp_compilation, tmp_work,
                 tmp_logs)

    # remove the compilation directory
    shutil.rmtree(tmp_compilation)

    # copy output files to tmp_work directory
    copy_contents_into(job_id, test_output_path, tmp_work, tmp_logs)

    # copy any instructor custom validation code into the tmp work directory
    copy_contents_into(job_id, custom_validation_code_path, tmp_work, tmp_logs)

    subprocess.call(['ls', '-lR', '.'],
                    stdout=open(tmp_logs + "/overall.txt", 'a'))

    # copy validator.out to the current directory
    shutil.copy(os.path.join(bin_path, "validate.out"),
                os.path.join(tmp_work, "my_validator.out"))

    # give the untrusted user read/write/execute permissions on the tmp directory & files
    add_permissions_recursive(tmp_work,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH,
                              stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH)

    add_permissions(os.path.join(tmp_work, "my_validator.out"),
                    stat.S_IROTH | stat.S_IXOTH)

    # run the validator.out as the untrusted user
    with open(os.path.join(tmp_logs, "validator_log.txt"), 'w') as logfile:
        if USE_DOCKER:
            validator_success = subprocess.call([
                'docker', 'exec', '-w', tmp_work, container,
                os.path.join(tmp_work, 'my_validator.out'),
                queue_obj['gradeable'], queue_obj['who'],
                str(queue_obj['version']), submission_string
            ],
                                                stdout=logfile)
        else:
            validator_success = subprocess.call([
                os.path.join(SUBMITTY_INSTALL_DIR, "sbin",
                             "untrusted_execute"), which_untrusted,
                os.path.join(tmp_work, "my_validator.out"),
                queue_obj["gradeable"], queue_obj["who"],
                str(queue_obj["version"]), submission_string
            ],
                                                stdout=logfile)

    if validator_success == 0:
        print(which_machine, which_untrusted, "VALIDATOR OK")
    else:
        print(which_machine, which_untrusted, "VALIDATOR FAILURE")
        grade_items_logging.log_message(job_id,
                                        is_batch_job,
                                        which_untrusted,
                                        item_name,
                                        message="VALIDATION FAILURE")

    untrusted_grant_rwx_access(which_untrusted, tmp_work)

    # grab the result of autograding
    grade_result = ""
    with open(os.path.join(tmp_work, "grade.txt")) as f:
        lines = f.readlines()
        for line in lines:
            line = line.rstrip('\n')
            if line.startswith("Automatic grading total:"):
                grade_result = line

    # --------------------------------------------------------------------
    # MAKE RESULTS DIRECTORY & COPY ALL THE FILES THERE
    tmp_results = os.path.join(tmp, "TMP_RESULTS")

    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        print("====================================\nARCHIVING STARTS", file=f)

    subprocess.call(['ls', '-lR', '.'],
                    stdout=open(tmp_logs + "/overall.txt", 'a'))

    os.makedirs(os.path.join(tmp_results, "details"))

    patterns_work_to_details = complete_config_obj["autograding"][
        "work_to_details"]
    pattern_copy("work_to_details", patterns_work_to_details, tmp_work,
                 os.path.join(tmp_results, "details"), tmp_logs)

    history_file_tmp = os.path.join(tmp_submission, "history.json")
    history_file = os.path.join(tmp_results, "history.json")
    if os.path.isfile(history_file_tmp):
        shutil.move(history_file_tmp, history_file)
        # fix permissions
        ta_group_id = os.stat(tmp_results).st_gid
        os.chown(history_file, int(HWCRON_UID), ta_group_id)
        add_permissions(history_file, stat.S_IRGRP)
    grading_finished = dateutils.get_current_time()

    shutil.copy(os.path.join(tmp_work, "grade.txt"), tmp_results)

    # -------------------------------------------------------------
    # create/append to the results history

    # grab the submission time
    with open(os.path.join(submission_path,
                           ".submit.timestamp")) as submission_time_file:
        submission_string = submission_time_file.read().rstrip()
    submission_datetime = dateutils.read_submitty_date(submission_string)

    gradeable_deadline_datetime = dateutils.read_submitty_date(
        gradeable_deadline_string)
    gradeable_deadline_longstring = dateutils.write_submitty_date(
        gradeable_deadline_datetime)
    submission_longstring = dateutils.write_submitty_date(submission_datetime)

    seconds_late = int(
        (submission_datetime - gradeable_deadline_datetime).total_seconds())
    # note: negative = not late

    with open(os.path.join(tmp_submission, ".grading_began"), 'r') as f:
        grading_began_longstring = f.read()
    grading_began = dateutils.read_submitty_date(grading_began_longstring)
    grading_finished_longstring = dateutils.write_submitty_date(
        grading_finished)

    gradingtime = (grading_finished - grading_began).total_seconds()

    with open(os.path.join(tmp_submission, "queue_file.json"), 'r') as infile:
        queue_obj = json.load(infile)
    queue_obj["gradingtime"] = gradingtime
    queue_obj["grade_result"] = grade_result
    queue_obj["which_untrusted"] = which_untrusted

    with open(os.path.join(tmp_results, "queue_file.json"), 'w') as outfile:
        json.dump(queue_obj,
                  outfile,
                  sort_keys=True,
                  indent=4,
                  separators=(',', ': '))

    with open(os.path.join(tmp_work, "results.json"), 'r') as read_file:
        results_obj = json.load(read_file)
    if 'revision' in queue_obj.keys():
        results_obj['revision'] = queue_obj['revision']
    with open(os.path.join(tmp_results, "results.json"), 'w') as outfile:
        json.dump(results_obj,
                  outfile,
                  sort_keys=True,
                  indent=4,
                  separators=(',', ': '))

    write_grade_history.just_write_grade_history(
        history_file, gradeable_deadline_longstring, submission_longstring,
        seconds_late, queue_time_longstring, is_batch_job_string,
        grading_began_longstring, int(waittime), grading_finished_longstring,
        int(gradingtime), grade_result)

    os.chdir(SUBMITTY_DATA_DIR)

    if USE_DOCKER:
        with open(os.path.join(tmp_logs, "overall_log.txt"), 'w') as logfile:
            chmod_success = subprocess.call([
                'docker', 'exec', '-w', tmp_work, container, 'chmod', '-R',
                'o+rwx', '.'
            ],
                                            stdout=logfile)

    with open(os.path.join(tmp_logs, "overall.txt"), 'a') as f:
        f.write("FINISHED GRADING!\n")

    # save the logs!
    shutil.copytree(tmp_logs, os.path.join(tmp_results, "logs"))

    # zip up results folder
    filehandle, my_results_zip_file = tempfile.mkstemp()
    zip_my_directory(tmp_results, my_results_zip_file)
    os.close(filehandle)
    shutil.rmtree(tmp_autograding)
    shutil.rmtree(tmp_submission)
    shutil.rmtree(tmp_results)
    shutil.rmtree(tmp_work)
    shutil.rmtree(tmp)

    # WIP: extra logging for testing
    #grade_items_logging.log_message(job_id,is_batch_job,which_untrusted,submission_path,message="done grading")

    # --------------------------------------------------------------------
    # CLEAN UP DOCKER
    if USE_DOCKER:
        subprocess.call(['docker', 'rm', '-f', container])
        dockerdestroy_done = dateutils.get_current_time()
        dockerdestroy_time = (dockerdestroy_done -
                              grading_finished).total_seconds()
        grade_items_logging.log_message(job_id, is_batch_job, which_untrusted,
                                        submission_path, "ddt:",
                                        dockerdestroy_time,
                                        "docker container destroyed")

    grade_items_logging.log_message(job_id, is_batch_job, which_untrusted,
                                    item_name, "grade:", gradingtime,
                                    grade_result)

    return my_results_zip_file
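
grade_from_zip repeatedly widens permissions with add_permissions / add_permissions_recursive so the untrusted user can read, write, and execute the staged files. A minimal sketch in the same spirit (the three-mask signature, one mask for the top directory, one for nested directories, one for files, is an assumption based on how the calls above pass three identical masks):

import os
import stat

def add_permissions(path, perms):
    # OR the requested bits into the existing mode
    os.chmod(path, os.stat(path).st_mode | perms)

def add_permissions_recursive_sketch(top_dir, top_perms, dir_perms, file_perms):
    add_permissions(top_dir, top_perms)
    for root, dirs, files in os.walk(top_dir):
        for d in dirs:
            add_permissions(os.path.join(root, d), dir_perms)
        for name in files:
            add_permissions(os.path.join(root, name), file_perms)

Because the bits are OR'd in rather than assigned, existing owner permissions are preserved while group/other access is widened.
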