Example #1
0
def get_repo_type(repo_name):
    """
    Classify a repository by its name.

    Returns "group" for names like "group1", "personal" for two- or
    three-letter login-style names, and None when the name matches neither
    pattern.  The answer is passed through the "get-repo-type" filter so a
    deployment can override the classification.
    """
    if re.match(r'^group\d+$', repo_name):
        initial_guess = "group"
    elif re.match(r'^[a-z]{2,3}$', repo_name):
        initial_guess = "personal"
    else:
        initial_guess = None

    # Hook: lets site configuration re-classify repos whose names do not
    # follow the default naming scheme.
    #
    # Arguments:
    #   repo_type -- The original guess for the repo_type (see code above)
    #   repo_name -- The name of the repo (e.g. "aa" or "group1")
    #
    # Returns:
    #   Either "group" or "personal"
    return apply_filters("get-repo-type", initial_guess, repo_name)
Example #2
0
def get_repo_type(repo_name):
    """
    Guess whether repo_name denotes a "group" or a "personal" repository.

    Group repos look like "group1"; personal repos are 2-3 lowercase letters
    (e.g. "aa").  Anything else yields None.  The guess is run through the
    "get-repo-type" filter, which a deployment may use to override it.
    """
    # Ordered (pattern, label) table; first match wins.
    classifications = (
        (r'^group\d+$', "group"),
        (r'^[a-z]{2,3}$', "personal"),
    )
    repo_type = None
    for pattern, label in classifications:
        if re.match(pattern, repo_name):
            repo_type = label
            break

    # Hook for renaming schemes the default table does not cover.
    #
    # Arguments:
    #   repo_type -- The original guess for the repo_type (see code above)
    #   repo_name -- The name of the repo (e.g. "aa" or "group1")
    #
    # Returns:
    #   Either "group" or "personal"
    return apply_filters("get-repo-type", repo_type, repo_name)
def pushhook():
    """
    HTTP endpoint handler for GitHub push webhooks.

    Parses the JSON push payload, validates its shape, asks the
    "pushhooks-jobs-to-run" filter which autograder jobs should run for this
    push, records a build for each job, and enqueues the jobs on the docker
    grader queue.

    Returns ('', 204) on success or when there is nothing to do; aborts with
    400 for requests carrying a CSRF token and 500 on any processing error.
    """
    payload_bytes = request.get_data()
    if request.form.get("_csrf_token"):
        # You should not be able to use a CSRF token for this
        abort(400)
    try:
        payload = json.loads(payload_bytes)
        # NOTE(review): these asserts are input validation and are stripped
        # under "python -O"; when they fire, the AssertionError is caught by
        # the broad handler below and becomes a 500.
        assert isinstance(payload, dict)
        if payload.get("action", "push") != "push":
            logging.warning("Dropped GitHub pushhook payload because action was %s" %
                            str(payload.get("action")))
            return ('', 204)
        ref = payload["ref"]
        before = payload["before"]
        after = payload["after"]
        # basestring: this is Python 2 code (covers both str and unicode).
        assert isinstance(ref, basestring)
        assert isinstance(before, basestring)
        assert isinstance(after, basestring)
        repo_name = payload["repository"]["name"]
        assert isinstance(repo_name, basestring)
        file_list = get_diff_file_list(repo_name, before, after)
        if not file_list:
            # Normalize a falsy result (presumably None on failure -- verify
            # against get_diff_file_list) so the hook always receives a list.
            file_list = []

        # This is a useful hook to use, if you want to add custom logic to determine which jobs get
        # run on a Git push.
        #
        # Arguments:
        #   jobs           -- The original list of jobs (default: empty list)
        #   repo_name      -- The name of the repo that caused the pushhook
        #   ref            -- The name of the ref that was pushed (e.g. "refs/heads/master")
        #   modified_files -- A list of files that were changed in the push, relative to repo root
        #
        # Returns:
        #   A list of job names. (e.g. ["hw0", "hw0-style-check"])
        jobs_to_run = apply_filters("pushhooks-jobs-to-run", [], repo_name, ref, file_list)

        if not jobs_to_run:
            return ('', 204)

        # We could probably grab this from the payload, but let's call this method for the sake
        # of consistency.
        message = get_commit_message(repo_name, after)

        for job_to_run in jobs_to_run:
            # Retry the build insert until it succeeds.  NOTE(review): a
            # persistent apsw.Error (not just transient lock contention)
            # makes this loop spin forever.
            while True:
                try:
                    with DbCursor() as c:
                        build_name = create_build(c, job_to_run, repo_name, after, message)
                    break
                except apsw.Error:
                    logging.exception("Failed to create build, retrying...")
            job = Job(build_name, repo_name, "GitHub push")
            dockergrader_queue.enqueue(job)
        return ('', 204)
    except Exception:
        logging.exception("Error occurred while processing GitHub pushhook payload")
        abort(500)
Example #4
0
def pushhook():
    """
    HTTP endpoint handler for GitHub push webhooks.

    Parses the JSON push payload, validates its shape, asks the
    "pushhooks-jobs-to-run" filter which autograder jobs should run for this
    push, records a build for each job, and enqueues the jobs on the docker
    grader queue.

    Returns ("", 204) on success or when there is nothing to do; aborts with
    400 for requests carrying a CSRF token and 500 on any processing error.
    """
    payload_bytes = request.get_data()
    if request.form.get("_csrf_token"):
        # You should not be able to use a CSRF token for this
        abort(400)
    try:
        payload = json.loads(payload_bytes)
        # NOTE(review): these asserts are input validation and are stripped
        # under "python -O"; when they fire, the AssertionError is caught by
        # the broad handler below and becomes a 500.
        assert isinstance(payload, dict)
        if payload.get("action", "push") != "push":
            logging.warning("Dropped GitHub pushhook payload because action was %s" % str(payload.get("action")))
            return ("", 204)
        ref = payload["ref"]
        before = payload["before"]
        after = payload["after"]
        # basestring: this is Python 2 code (covers both str and unicode).
        assert isinstance(ref, basestring)
        assert isinstance(before, basestring)
        assert isinstance(after, basestring)
        repo_name = payload["repository"]["name"]
        assert isinstance(repo_name, basestring)
        file_list = get_diff_file_list(repo_name, before, after)
        if not file_list:
            # Normalize a falsy result (presumably None on failure -- verify
            # against get_diff_file_list) so the hook always receives a list.
            file_list = []

        # This is a useful hook to use, if you want to add custom logic to determine which jobs get
        # run on a Git push.
        #
        # Arguments:
        #   jobs           -- The original list of jobs (default: empty list)
        #   repo_name      -- The name of the repo that caused the pushhook
        #   ref            -- The name of the ref that was pushed (e.g. "refs/heads/master")
        #   modified_files -- A list of files that were changed in the push, relative to repo root
        #
        # Returns:
        #   A list of job names. (e.g. ["hw0", "hw0-style-check"])
        jobs_to_run = apply_filters("pushhooks-jobs-to-run", [], repo_name, ref, file_list)

        if not jobs_to_run:
            return ("", 204)

        # We could probably grab this from the payload, but let's call this method for the sake
        # of consistency.
        message = get_commit_message(repo_name, after)

        for job_to_run in jobs_to_run:
            # Retry the build insert until it succeeds.  NOTE(review): a
            # persistent apsw.Error (not just transient lock contention)
            # makes this loop spin forever.
            while True:
                try:
                    with DbCursor() as c:
                        build_name = create_build(c, job_to_run, repo_name, after, message)
                    break
                except apsw.Error:
                    logging.exception("Failed to create build, retrying...")
            job = Job(build_name, repo_name, "GitHub push")
            dockergrader_queue.enqueue(job)
        return ("", 204)
    except Exception:
        logging.exception("Error occurred while processing GitHub pushhook payload")
        abort(500)
Example #5
0
 def __init__(self, path=None, read_only=False):
     """
     Open an apsw connection to the SQLite database.

     path      -- database file path; defaults to config.database_path
     read_only -- when True, open read-only; otherwise create/read-write
     """
     if path is None:
         path = config.database_path
     if read_only:
         flags = apsw.SQLITE_OPEN_READONLY
     else:
         flags = apsw.SQLITE_OPEN_CREATE | apsw.SQLITE_OPEN_READWRITE
     self.connection = apsw.Connection(path, flags)
     # Wait up to 5000 ms on a locked database before failing with SQLITE_BUSY.
     self.connection.setbusytimeout(5000)
     # The "database-vtmodules" hook lets deployments register extra
     # virtual-table modules alongside the assignments module.
     for module in apply_filters("database-vtmodules", [self.get_assignments_vtmodule()]):
         module.registerWithConnection(self.connection)
Example #6
0
    def __init__(self, path=None, read_only=False):
        """
        Acquire the global database lock and open an apsw connection.

        path      -- database file path; defaults to config.database_path
        read_only -- when True, open read-only; otherwise create/read-write
        """
        if path is None:
            path = config.database_path
        if read_only:
            flags = apsw.SQLITE_OPEN_READONLY
        else:
            flags = apsw.SQLITE_OPEN_CREATE | apsw.SQLITE_OPEN_READWRITE

        # The global lock is acquired in the constructor, so you must never instantiate a DbCursor
        # object without actually using it.
        global_database_lock.acquire()

        try:
            # The connection setup must be done in the constructor, NOT in __enter__.
            # If __enter__ raises an exception, then the __exit__ method will also be called.
            self.connection = apsw.Connection(path, flags)
            # Wait up to 5000 ms on a locked database before failing with SQLITE_BUSY.
            self.connection.setbusytimeout(5000)
            # The "database-vtmodules" hook lets deployments register extra
            # virtual-table modules alongside the assignments module.
            for module in apply_filters("database-vtmodules", [self.get_assignments_vtmodule()]):
                module.registerWithConnection(self.connection)
        except Exception:
            # Setup failed: release the lock so a failed construction does not
            # leave it held forever, then propagate the original error.
            global_database_lock.release()
            raise
Example #7
0
    def process_job(self, operation, payload):
        """
        Handle one mail-queue job.

        Only the "send" operation is supported: an SMTP connection is obtained
        (configurable through the "connect-to-smtp" filter), the message in
        payload is sent via sendmail(*payload), and the connection is closed.
        Unknown operations are logged and ignored.
        """
        if operation != "send":
            logging.warning("Unknown operation requested in mailerqueue: %s" % operation)
            return

        # Hook for swapping the SMTP server used by the mail queue -- e.g. a
        # 3rd-party email relay, or 127.0.0.1 (there's a mail server running
        # on most of the INST servers).
        #
        # Arguments:
        #   smtp_server -- A smtplib.SMTP() object.
        #
        # Returns:
        #   An smtplib.SMTP() object (or compatible) that can be used to send mail.
        server = apply_filters("connect-to-smtp", smtplib.SMTP())

        server.sendmail(*payload)
        server.quit()
    def __init__(self, path=None, read_only=False):
        """
        Acquire the global database lock and open an apsw connection.

        path      -- database file path; defaults to config.database_path
        read_only -- when True, open read-only; otherwise create/read-write
        """
        if path is None:
            path = config.database_path
        if read_only:
            flags = apsw.SQLITE_OPEN_READONLY
        else:
            flags = apsw.SQLITE_OPEN_CREATE | apsw.SQLITE_OPEN_READWRITE

        # The global lock is acquired in the constructor, so you must never instantiate a DbCursor
        # object without actually using it.
        global_database_lock.acquire()

        try:
            # The connection setup must be done in the constructor, NOT in __enter__.
            # If __enter__ raises an exception, then the __exit__ method will also be called.
            self.connection = apsw.Connection(path, flags)
            # Wait up to 5000 ms on a locked database before failing with SQLITE_BUSY.
            self.connection.setbusytimeout(5000)
            # The "database-vtmodules" hook lets deployments register extra
            # virtual-table modules alongside the assignments module.
            for module in apply_filters("database-vtmodules",
                                        [self.get_assignments_vtmodule()]):
                module.registerWithConnection(self.connection)
        except Exception:
            # Setup failed: release the lock so a failed construction does not
            # leave it held forever, then propagate the original error.
            global_database_lock.release()
            raise
Example #9
0
    def process_job(self, operation, payload):
        """
        Dispatch a single queued mail operation.

        "send" opens an SMTP connection (site-configurable via the
        "connect-to-smtp" filter), forwards payload to sendmail, and quits the
        connection.  Any other operation name is merely logged as a warning.
        """
        if operation == "send":
            # The "connect-to-smtp" hook can replace the default smtplib.SMTP()
            # with a connection to any compatible relay (a 3rd-party provider,
            # or 127.0.0.1 -- most of the INST servers run a local mail server).
            #
            # Arguments:
            #   smtp_server -- A smtplib.SMTP() object.
            #
            # Returns:
            #   An smtplib.SMTP() object (or compatible) that can be used to send mail.
            connection = apply_filters("connect-to-smtp", smtplib.SMTP())
            connection.sendmail(*payload)
            connection.quit()
            return
        logging.warning("Unknown operation requested in mailerqueue: %s" %
                        operation)