예제 #1
0
async def get_login_modal_data():
    """Expose the Google sign-in configuration required by the login modal."""
    payload = {
        'clientId': config_file.get("google_client_id"),
        'hostedDomain': config_file.get("allowed_email_domain"),
    }
    return make_success_response(payload)
예제 #2
0
async def get_game_data(data: GetGameData,
                        user: User = Security(get_current_user,
                                              scopes=["submission.play"])):
    """Report whether the requested submissions may be played together,
    along with the configured gamemode id and options."""
    with cuwais.database.create_session() as db_session:
        playable = queries.are_submissions_playable(
            db_session, data.submission_ids, user.id)

    gamemode_info = {
        'id': config_file.get("gamemode.id"),
        'options': config_file.get("gamemode.options"),
    }
    return make_success_response({'allowed': playable,
                                  'gamemode': gamemode_info})
예제 #3
0
def create_raw_files_submission(db_session: Session, user: User, files: List[SubmissionRawFileData]) -> int:
    """Package raw uploaded files into a tar archive and record a submission.

    Returns the new submission's id.

    Raises:
        RepoTooBigException: combined payload exceeds the configured limit.
        AlreadyExistsException: an identical submission is already stored.
    """
    url = "file://localfiles"

    # Size-check the same encoded bytes that are written into the archive
    # below.  (The original iterated `for f, v in files`, unpacking each
    # file object instead of reading its .data attribute as every other
    # loop in this function does, and counted characters rather than bytes.)
    size = sum(len(file.data.encode()) for file in files)
    if size > int(config_file.get("max_repo_size_bytes")):
        raise RepoTooBigException(url)

    # Calculate submission hash over the concatenated file contents.
    digest = hashlib.sha256()
    for file in files:
        digest.update(file.data.encode())
    files_hash = cuwais.common.calculate_git_hash(user.id, digest.hexdigest(), url)

    logging.info(f"New raw submission with hash {files_hash}")
    archive_dir = get_repo_path(files_hash)
    if archive_dir.exists():
        raise AlreadyExistsException(url)

    # Create tar and save
    with tarfile.open(archive_dir, mode='w') as tar:
        for file in files:
            data = file.data.encode()
            fileobj = io.BytesIO(data)
            info = tarfile.TarInfo(name=file.fileName)
            info.size = len(data)
            tar.addfile(info, fileobj)

    return create_submission(db_session, user, url, files_hash)
예제 #4
0
def download_repository(user_id: int, url: str) -> str:
    """Shallow-clone the git repository at *url* and store a tar archive
    of its HEAD tree, returning the files hash that names the archive.

    Raises:
        InvalidGitURL: malformed URL, unreachable remote, or no HEAD.
        AlreadyCloningException: another thread is already cloning it.
        AlreadyExistsException: the archive already exists.
        RepoTooBigException: the checked-out tree exceeds the size limit.
        CantCloneException: any other failure during clone/archive.
    """
    # A newline could smuggle extra arguments into the git invocation.
    if "\n" in url:
        raise InvalidGitURL("Invalid URL", url)

    buf = StringIO()
    try:
        # Probe the remote without cloning anything.
        sh.git("ls-remote", url, _out=buf)
    except sh.ErrorReturnCode:
        raise InvalidGitURL("Invalid GIT URL", url)

    ping_string = str(buf.getvalue())
    match = GIT_HASH_RE.match(ping_string)
    if match is None:
        raise InvalidGitURL("GIT URL has no HEAD", url)

    commit_hash = match.group(1)
    files_hash = cuwais.common.calculate_git_hash(user_id, commit_hash, url)

    clone_dir = Path(GIT_BASE_DIR, files_hash)
    clone_dir_str = str(clone_dir.absolute())

    archive_dir = get_repo_path(files_hash)
    archive_dir_str = str(archive_dir.absolute())

    # The existence checks plus mkdir must be atomic across threads; the
    # mkdir acts as the "claim" on this hash.
    _cloning_dirs_mutex.acquire()
    try:
        if clone_dir.exists():
            raise AlreadyCloningException(url)

        if archive_dir.exists():
            raise AlreadyExistsException(url)

        os.mkdir(clone_dir)
    finally:
        _cloning_dirs_mutex.release()

    try:
        # Only HEAD is needed, so a depth-1 clone suffices.
        sh.git.clone(url, clone_dir_str, "--depth=1")

        size = get_dir_size_bytes(clone_dir_str)
        if size > int(config_file.get("max_repo_size_bytes")):
            # TODO: Cache too big entries in redis
            raise RepoTooBigException(url)

        sh.git.archive("--output=" + archive_dir_str,
                       "--format=tar",
                       "HEAD",
                       _cwd=clone_dir_str)
    except RepoTooBigException:
        # Bug fix: previously this was swallowed by the generic handler
        # below and re-raised as CantCloneException, so callers that catch
        # RepoTooBigException never saw the "too large" condition.
        raise
    except Exception as e:
        logging.exception(e)
        raise CantCloneException(url) from e
    finally:
        # Always remove the working clone; only the archive is kept.
        if clone_dir.exists():
            rmtree(clone_dir_str)

    return files_hash
예제 #5
0
async def add_submission_raw_files(data: SubmissionRawFilesData,
                                   user: User = Security(
                                       get_current_user,
                                       scopes=["submission.add"])):
    """Create a submission from raw uploaded files.

    Returns a success response with the new submission id, or a fail
    response with a localised message when the identical submission
    already exists or the upload exceeds the size limit.
    """
    try:
        with cuwais.database.create_session() as db_session:
            submission_id = queries.create_raw_files_submission(
                db_session, user, data.files)
            db_session.commit()
    except repo.AlreadyExistsException:
        # Plain strings: the originals were f-strings with no placeholders.
        logging.debug("New raw submission failed as it was already submitted")
        return make_fail_response(
            config_file.get("localisation.git_errors.already-submitted"))
    except repo.RepoTooBigException:
        logging.debug("New raw submission failed as it was too large")
        return make_fail_response(
            config_file.get("localisation.git_errors.too-large"))

    return make_success_response({"submission_id": submission_id})
예제 #6
0
async def delete_submission(data: SubmissionRequestData,
                            user: User = Security(
                                get_current_user,
                                scopes=["submissions.remove"])):
    """Delete a submission, provided the requesting user owns it."""
    with cuwais.database.create_session() as db_session:
        owned = queries.submission_is_owned_by_user(
            db_session, data.submission_id, user.id)
        if not owned:
            message = config_file.get("localisation.submission_access_error")
            return make_fail_response(message)

        queries.delete_submission(db_session, data.submission_id)

    return make_success_response()
예제 #7
0
async def add_bot(data: BotData,
                  _: User = Security(get_current_user, scopes=["bot.add"])):
    """Create a bot and register its git submission.

    On any repo failure a localised fail response is returned and the
    transaction is never committed.
    """
    # Failure type -> localisation key, in the same order the original
    # except clauses appeared.
    failure_messages = [
        (repo.InvalidGitURL, "localisation.git_errors.invalid-url"),
        (repo.AlreadyExistsException,
         "localisation.git_errors.already-submitted"),
        (repo.RepoTooBigException, "localisation.git_errors.too-large"),
        (repo.CantCloneException, "localisation.git_errors.clone-fail"),
    ]

    with cuwais.database.create_session() as db_session:
        bot = queries.create_bot(db_session, data.name)
        db_session.flush()
        try:
            submission_id = queries.create_git_submission(
                db_session, bot, data.url)
        except tuple(exc for exc, _key in failure_messages) as err:
            key = next(k for exc, k in failure_messages
                       if isinstance(err, exc))
            return make_fail_response(config_file.get(key))
        db_session.commit()

    return make_success_response({"submission_id": submission_id})
예제 #8
0
def get_user_from_google_token(db_session, token) -> User:
    """Validate a Google ID token and return the matching (or newly
    created) User.

    Aborts with HTTP 400 on any validation failure: invalid token, wrong
    issuer, disallowed email domain, or unverified email.
    """
    id_info = None
    try:
        # Specify the CLIENT_ID of the app that accesses the backend:
        google_request = google.auth.transport.requests.Request(
            session=cached_session)
        id_info = id_token.verify_oauth2_token(token, google_request,
                                               _client_id)

        # Exact issuer match (security fix): the previous endswith() check
        # would also have accepted issuers like "evil-accounts.google.com".
        # Google's OpenID Connect docs list exactly these two values.
        if str(id_info['iss']) not in ('accounts.google.com',
                                       'https://accounts.google.com'):
            raise ValueError('Wrong issuer.')

    except (exceptions.GoogleAuthError, ValueError) as e:
        logging.warning(
            f"Attempted login with invalid token: {token}; {id_info}; {e}")
        abort(400)

    email = str(id_info['email'])
    # NOTE(review): other code reads "allowed_email_domain" without the
    # "config." prefix — confirm which key actually exists in the config.
    required_domain = config_file.get("config.allowed_email_domain")
    if required_domain is not None and not email.endswith(
            f'@{required_domain}'):
        logging.warning(f"Non-uni email: {email}; {id_info}")
        abort(400)

    if not bool(id_info['email_verified']):
        logging.warning(f"Unverified email: {email}; {id_info}")
        abort(400)

    # User ID stored in value 'sub'
    # See https://developers.google.com/identity/protocols/oauth2/openid-connect
    google_id = str(id_info['sub'])
    name = str(id_info['name'])

    user = db_session.execute(
        select(User).where(User.google_id == google_id)).scalar_one_or_none()

    if user is None:
        # First login: create the account with a generated nickname.
        nick = generate_nickname(db_session)
        user = User(nickname=nick,
                    real_name=name,
                    google_id=google_id,
                    is_admin=email in ADMINS)
        db_session.add(user)

    return user
예제 #9
0
async def set_submission_active(data: SubmissionActiveData,
                                user: User = Security(
                                    get_current_user,
                                    scopes=["submission.modify"])):
    """Enable or disable one of the requesting user's submissions."""
    with cuwais.database.create_session() as db_session:
        owns_it = queries.submission_is_owned_by_user(
            db_session, data.submission_id, user.id)
        if not owns_it:
            return make_fail_response(
                config_file.get("localisation.submission_access_error"))

        queries.set_submission_enabled(db_session, data.submission_id,
                                       data.enabled)
        db_session.commit()

    return make_success_response({"submission_id": data.submission_id})
예제 #10
0
async def get_accessible_navbar(user: Optional[User] = Security(
    get_current_user_or_none, scopes=["me"])):
    """List the navbar pages visible to the current (possibly anonymous)
    user, plus the society name to display."""
    if user is None:
        pages = ['about', 'login']
    else:
        pages = ['about', 'leaderboard', 'submissions', 'me', 'logout']
        if user.is_admin:
            pages.append('admin')

    return make_success_response({
        "soc_name": config_file.get("soc_name").upper(),
        "accessible": pages,
    })
예제 #11
0
def get_leaderboard_graph(db_session: Session, querying_user_id: int):
    """Assemble leaderboard-graph data: the cached score deltas plus public
    info for each user involved, marking the querying user.

    Deltas referring to users deleted since the cache was built are
    dropped.
    """
    cached_deltas = get_leaderboard_graph_data()

    users = {}
    kept_deltas = []
    init = int(config_file.get("initial_score"))
    for delta in cached_deltas:
        other_user_id = delta['user_id']
        user = db_session.query(User).get(other_user_id)

        # If the user has been deleted since the cache was built, skip the
        # delta.  (The original `del deltas[other_user_id]` indexed the
        # list by user id — removing the wrong element — and mutated the
        # list while iterating it.)
        if user is None:
            continue

        kept_deltas.append(delta)
        public = user.to_public_dict()
        public["is_you"] = other_user_id == querying_user_id
        users[str(other_user_id)] = public

    return {"users": users, "deltas": kept_deltas, "initial_score": init}
예제 #12
0
def get_scoreboard_data():
    """Build the scoreboard: every user's total score plus their
    win/loss/draw counts over the last 24 hours, sorted by score
    descending."""
    with cuwais.database.create_session() as db_session:
        # Total points per user across all results of all their
        # submissions; outer joins keep users with no results.
        user_scores = db_session.query(
            User,
            func.sum(Result.points_delta).label("total_score")
        ).outerjoin(User.submissions) \
            .outerjoin(Submission.results) \
            .group_by(User.id) \
            .order_by("total_score") \
            .all()

        since = datetime.now() - timedelta(hours=24)
        counts = {}
        # For each outcome, count each user's recent, healthy,
        # point-affecting results.  `== True` is intentional: SQLAlchemy
        # translates it into a SQL comparison.
        for outcome in Outcome:
            user_outcome_counts = db_session.query(
                User.id,
                func.count(Result.id)
            ).join(User.submissions) \
                .join(Submission.results) \
                .join(Result.match) \
                .filter(Result.outcome == int(outcome.value), Result.healthy == True, Result.points_delta != 0) \
                .filter(Match.match_date > since) \
                .group_by(User.id) \
                .all()

            counts[outcome] = user_outcome_counts

    # Convert outcomes to wins/losses/draws
    counts_by_outcome = {o: {user_id: count for user_id, count in counts[o]} for o in Outcome}

    # Users missing from a count query simply had zero such results.
    init = int(config_file.get("initial_score"))
    outcomes = {user.id: {"wins": counts_by_outcome[Outcome.Win].get(user.id, 0),
                          "losses": counts_by_outcome[Outcome.Loss].get(user.id, 0),
                          "draws": counts_by_outcome[Outcome.Draw].get(user.id, 0)} for [user, _] in user_scores}
    scores = [make_scoreboard_entry(user, score, init, outcomes[user.id])
              for [user, score] in reversed(user_scores)]

    scores.sort(key=lambda e: e["score"], reverse=True)

    return scores
예제 #13
0
import logging

from cuwais.config import config_file

# In debug mode log everything with timestamps; otherwise only warnings,
# bare messages.
_debug_enabled = config_file.get("debug")

logging.basicConfig(
    format=('%(asctime)s.%(msecs)03d %(levelname)-8s %(message)s'
            if _debug_enabled else '%(message)s'),
    level=logging.DEBUG if _debug_enabled else logging.WARNING,
    datefmt='%Y-%m-%d %H:%M:%S')

logger = logging.getLogger("submission-runner")
예제 #14
0
from cuwais.config import config_file

# The application secret is provided as a docker secret at a fixed path.
with open("/run/secrets/secret_key") as secrets_file:
    secret = secrets_file.read()
    SECRET_KEY = secret

DEBUG = config_file.get("debug")
PROFILE = config_file.get("profile")
예제 #15
0
from cuwais.config import config_file

# The application secret is provided as a docker secret at a fixed path.
with open("/run/secrets/secret_key") as secrets_file:
    secret = secrets_file.read()
    SECRET_KEY = secret

DEBUG = config_file.get("debug")
SECURE = config_file.get("secure")

PROFILE = config_file.get("profile")

SERVER_NAME = config_file.get("front_end.server_name")

ACCESS_TOKEN_EXPIRE_MINUTES = config_file.get(
    "front_end.access_token_expire_minutes")
ACCESS_TOKEN_ALGORITHM = config_file.get("front_end.access_token_algorithm")

# Emails that are granted admin rights.
ADMINS = {str(account) for account in config_file.get("admin_emails")}
예제 #16
0
from google.auth import exceptions
from google.oauth2 import id_token
import google.auth.transport.requests
import cachecontrol
import requests
from sqlalchemy import select

from werkzeug.exceptions import abort

from app import queries
from app.config import ADMINS
from app.queries import generate_nickname

session = requests.session()
cached_session = cachecontrol.CacheControl(session)
_client_id = config_file.get("google_client_id")


def get_user_from_google_token(db_session, token) -> User:
    id_info = None
    try:
        # Specify the CLIENT_ID of the app that accesses the backend:
        google_request = google.auth.transport.requests.Request(
            session=cached_session)
        id_info = id_token.verify_oauth2_token(token, google_request,
                                               _client_id)

        if not str(id_info['iss']).endswith('accounts.google.com'):
            raise ValueError('Wrong issuer.')

    except exceptions.GoogleAuthError as e1:
예제 #17
0
async def run(submission_hash: str) -> AsyncIterator[Connection]:
    """Create a sandbox container for the given submission, run its script,
    and yield a Connection for exchanging line-based messages with it.

    The container is always deleted (and the docker client closed) when
    the generator finishes, even on error.
    """
    docker = None
    container = None

    try:
        # Attach to docker
        docker = aiodocker.Docker()

        # Create container
        logger.debug(f"Creating container for hash {submission_hash}")
        env_vars = _get_env_vars()
        try:
            container = await _make_sandbox_container(docker, env_vars)
        except DockerError:
            # Log the full traceback before propagating; the finally block
            # still runs the cleanup.
            logger.error(traceback.format_exc())
            raise

        # Copy scripts and the submission in, then restrict the container.
        logger.debug(f"Container {container.id}: copying scripts")
        await _copy_sandbox_scripts(container)
        logger.debug(f"Container {container.id}: copying submission")
        await _copy_submission(container, submission_hash)
        logger.debug(f"Container {container.id}: locking down")
        await _lock_down(container)

        # Start script with the configured run timeout (seconds).
        logger.debug(f"Container {container.id}: running script")
        run_t = int(
            config_file.get('submission_runner.sandbox_run_timeout_seconds'))
        run_script_cmd = f"./sandbox/run.sh 'play.py' {run_t}"
        cmd_exec = await container.exec(cmd=run_script_cmd,
                                        user='******',
                                        stdin=True,
                                        stdout=True,
                                        stderr=True,
                                        tty=False,
                                        environment=env_vars,
                                        workdir="/home/sandbox/")
        unrun_t = int(
            config_file.get('submission_runner.sandbox_unrun_timeout_seconds'))
        cmd_stream: Stream = cmd_exec.start(timeout=unrun_t)

        # Set up input to the container: each message is newline-terminated.
        async def send_handler(m: str):
            logger.debug(f"Container {container.id} <-- '{m.encode()}'")
            await cmd_stream.write_in((m + "\n").encode())

        # Set up output from the container; a None message marks
        # end-of-stream.
        async def receive_handler() -> AsyncGenerator[str, None]:
            while True:
                message: aiodocker.stream.Message = await cmd_stream.read_out()
                if message is None:
                    break
                logger.debug(f"Container {container.id} --> '{message}'")
                yield bytes(message.data).decode()

        # Process output from the container
        logger.debug(f"Container {container.id}: setting up output processing")
        lines = _get_lines(receive_handler())

        logger.debug(f"Container {container.id}: connecting")
        yield MessagePrintConnection(send_handler, lines, container.id)

    finally:
        # Clean everything up
        if container is not None:
            logger.debug(f"Container {container.id}: cleaning up")
            await container.delete(force=True)
        if docker is not None:
            await docker.close()
예제 #18
0
def _get_env_vars() -> dict:
    """Environment variables passed into the sandbox container."""
    return {
        'PYTHONPATH': "/home/sandbox/",
        'DEBUG': str(config_file.get("debug")),
    }
예제 #19
0
async def _make_sandbox_container(
        client: aiodocker.docker.Docker,
        env_vars: dict) -> aiodocker.docker.DockerContainer:
    """Create and start a locked-down container for running a submission.

    Resource ceilings (memory, CPU, disk, tmpfs sizes) come from the
    config file; networking is disabled and most Linux capabilities are
    dropped.
    """
    mem_limit = _to_bytes(
        config_file.get("submission_runner.sandbox_memory_limit"))
    max_repo_size_bytes = int(config_file.get("max_repo_size_bytes"))
    # Docker expresses the CPU cap as quota microseconds per CpuPeriod
    # (100000us), so e.g. 0.5 CPUs -> quota of 50000.
    cpu_quota = int(
        100000 * float(config_file.get("submission_runner.sandbox_cpu_count")))

    tmpfs_flags = "rw,noexec,nosuid,noatime"  # See mount command

    # See https://docs.docker.com/engine/api/v1.30/#operation/ContainerCreate
    config = {
        "Image": DOCKER_IMAGE_NAME,
        # "Cmd": f"ls -al /",
        "Tty": True,
        "User": '******',
        "Env": [f"{key}={env_vars[key]}" for key in env_vars],
        "NetworkDisabled": True,
        "HostConfig": {
            # See https://docs.docker.com/engine/reference/run/#runtime-privilege-and-linux-capabilities
            "Capdrop": [
                "AUDIT_WRITE",
                "CHOWN",
                "DAC_OVERRIDE",
                # "FOWNER",  # Allows chmod
                "FSETID",
                "KILL",
                "MKNOD",
                "NET_BIND_SERVICE",
                "NET_RAW",
                "SETFCAP",
                "SETGID",
                "SETPCAP",
                "SETUID",
                "SYS_CHROOT"
            ],
            # Writable scratch locations, each capped at 1MB and
            # non-executable.
            "Tmpfs": {
                '/tmp': f'{tmpfs_flags},size=1M',
                '/var/tmp': f'{tmpfs_flags},size=1M',
                '/run/lock': f'{tmpfs_flags},size=1M',
                '/var/lock': f'{tmpfs_flags},size=1M'
            },
            "ShmSize":
            1 * 1024 * 1024,
            "NetworkMode":
            "none",
            "CpuPeriod":
            100000,
            "CpuQuota":
            cpu_quota,
            # MemorySwap is the RAM+swap total, so actual RAM is half the
            # configured limit.
            "Memory":
            mem_limit // 2,
            "MemorySwap":
            mem_limit,
            "OomKillDisable":
            True,
            # Allow the repo itself plus a little headroom on disk.
            "DiskQuota":
            max_repo_size_bytes + 2 * 1024 * 1024,
            "AutoRemove":
            True,
        }
    }

    container = await client.containers.create(config)
    await container.start()

    return container
예제 #20
0
def reason_crash(reason):
    """Map a crash reason code to its localised human-readable message,
    falling back to the configured default for unknown codes."""
    reasons = config_file.get("localisation.crash_reasons")
    fallback = config_file.get("localisation.default_crash_reason")
    return reasons.get(reason, fallback)
예제 #21
0
# base: the shared scaffold directory; extensions: selectable default
# submission directories for that gamemode.
SubmissionsData = namedtuple("SubmissionsData", ["base", "extensions"])

SUBMISSIONS_PATH = "/home/web_user/default_submissions/"

# Default submission code shipped with the image, keyed by gamemode id.
SUBMISSIONS: Dict[str, SubmissionsData] = {
    "chess": SubmissionsData("chess_base", ["chess_random_move"])
}

# Check that all submissions are present
# NOTE(review): these asserts are stripped under `python -O`; raise an
# exception instead if this validation must always run.
for data in SUBMISSIONS.values():
    assert os.path.isdir(SUBMISSIONS_PATH + data.base)
    for ext in data.extensions:
        assert os.path.isdir(SUBMISSIONS_PATH + ext)

# Fall back to the gamemode's first extension when none is configured.
DEFAULT_SUBMISSION = config_file.get("gamemode.default_submission")
if DEFAULT_SUBMISSION is None:
    DEFAULT_SUBMISSION = SUBMISSIONS[config_file.get("gamemode.id")].extensions[0]

# Check that the config option is valid
assert DEFAULT_SUBMISSION in SUBMISSIONS[config_file.get("gamemode.id")].extensions

BASE_SUBMISSION_PATH = SUBMISSIONS_PATH + SUBMISSIONS[config_file.get("gamemode.id")].base
DEFAULT_SUBMISSION_PATH = SUBMISSIONS_PATH + DEFAULT_SUBMISSION


def make_zip(base_dir, addition_dir):
    zip_path = addition_dir + ".zip"
    with zipfile.ZipFile(zip_path, mode='w') as my_zipfile:
        def add_dir_to_zip(dir_to_zip):
            for root, dirs, files in os.walk(dir_to_zip):