Code Example #1
File: auth.py  Project: shareef12/CTFd
def oauth_login():
    endpoint = get_app_config('OAUTH_AUTHORIZATION_ENDPOINT') \
        or get_config('oauth_authorization_endpoint') \
        or 'https://auth.majorleaguecyber.org/oauth/authorize'

    if get_config('user_mode') == 'teams':
        scope = 'profile team'
    else:
        scope = 'profile'

    client_id = get_app_config('OAUTH_CLIENT_ID') or get_config('oauth_client_id')

    if client_id is None:
        error_for(
            endpoint='auth.login',
            message='OAuth Settings not configured. '
                    'Ask your CTF administrator to configure MajorLeagueCyber integration.'
        )
        return redirect(url_for('auth.login'))

    redirect_url = "{endpoint}?response_type=code&client_id={client_id}&scope={scope}&state={state}".format(
        endpoint=endpoint,
        client_id=client_id,
        scope=scope,
        state=session['nonce']
    )
    return redirect(redirect_url)
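Note: the lookup above follows the resolution order used throughout these examples: a value from config.py (read with get_app_config) takes precedence, then the admin-panel value stored in the database (get_config), then a hard-coded default. A minimal standalone sketch of that pattern, assuming get_app_config and get_config are importable from CTFd.utils; resolve_config is a hypothetical helper name, not part of CTFd:

from CTFd.utils import get_app_config, get_config


def resolve_config(app_key, db_key, default=None):
    # Prefer the config.py / environment value, then the value an admin saved
    # in the database, then the supplied default.
    return get_app_config(app_key) or get_config(db_key) or default


# Hypothetical usage mirroring the endpoint lookup in the example above:
endpoint = resolve_config(
    "OAUTH_AUTHORIZATION_ENDPOINT",
    "oauth_authorization_endpoint",
    "https://auth.majorleaguecyber.org/oauth/authorize",
)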
Code Example #2
File: mailgun.py  Project: ChienNguyenVP/kma_ctf
def sendmail(addr, text, subject):
    ctf_name = get_config("ctf_name")
    mailfrom_addr = get_config("mailfrom_addr") or get_app_config("MAILFROM_ADDR")
    mailfrom_addr = "{} <{}>".format(ctf_name, mailfrom_addr)

    mailgun_base_url = get_config("mailgun_base_url") or get_app_config(
        "MAILGUN_BASE_URL"
    )
    mailgun_api_key = get_config("mailgun_api_key") or get_app_config("MAILGUN_API_KEY")
    try:
        r = requests.post(
            mailgun_base_url + "/messages",
            auth=("api", mailgun_api_key),
            data={
                "from": mailfrom_addr,
                "to": [addr],
                "subject": subject,
                "text": text,
            },
            timeout=1.0,
        )
    except requests.RequestException as e:
        return (
            False,
            "{error} exception occurred while handling your request".format(
                error=type(e).__name__
            ),
        )

    if r.status_code == 200:
        return True, "Email sent"
    else:
        return False, "Mailgun settings are incorrect"
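Note: a hypothetical call site for the sendmail helper above; the address, body, and subject are placeholders, and the function reports failure through its return tuple rather than by raising:

ok, message = sendmail(
    addr="player@example.com",
    text="Your password reset link: ...",
    subject="Password reset",
)
if not ok:
    print("Mail delivery failed:", message)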
Code Example #3
File: auth.py  Project: Edgaraaa/DisPlatform
def oauth_login():
    endpoint = (get_app_config("OAUTH_AUTHORIZATION_ENDPOINT")
                or get_config("oauth_authorization_endpoint")
                or "https://auth.majorleaguecyber.org/oauth/authorize")

    if get_config("user_mode") == "teams":
        scope = "profile team"
    else:
        scope = "profile"

    client_id = get_app_config("OAUTH_CLIENT_ID") or get_config(
        "oauth_client_id")

    if client_id is None:
        error_for(
            endpoint="auth.login",
            message="OAuth Settings not configured. "
            "Ask your CTF administrator to configure MajorLeagueCyber integration.",
        )
        return redirect(url_for("auth.login"))

    redirect_url = "{endpoint}?response_type=code&client_id={client_id}&scope={scope}&state={state}".format(
        endpoint=endpoint,
        client_id=client_id,
        scope=scope,
        state=session["nonce"])
    return redirect(redirect_url)
Code Example #4
def __init__(self):
    super(BaseUploader, self).__init__()
    self.oauth2_access_token = os.getenv(
        "DROPBOX_OAUTH2_TOKEN") or get_app_config("DROPBOX_OAUTH2_TOKEN")
    self.root_path = (os.getenv("DROPBOX_ROOT_PATH")
                      or get_app_config("DROPBOX_ROOT_PATH") or "/CTFd")
    self.client = Dropbox(self.oauth2_access_token, timeout=100)
    self.write_mode = "add"  # can be set to overwrite
Code Example #5
File: uploaders.py  Project: mrigank-9594/srhctf_
def _get_s3_connection(self):
    access_key = get_app_config('AWS_ACCESS_KEY_ID')
    secret_key = get_app_config('AWS_SECRET_ACCESS_KEY')
    endpoint = get_app_config('AWS_S3_ENDPOINT_URL')
    client = boto3.client('s3',
                          aws_access_key_id=access_key,
                          aws_secret_access_key=secret_key,
                          endpoint_url=endpoint)
    return client
Code Example #6
def _get_s3_connection(self):
    access_key = get_app_config("AWS_ACCESS_KEY_ID")
    secret_key = get_app_config("AWS_SECRET_ACCESS_KEY")
    client = boto3.client(
        "s3",
        aws_access_key_id=access_key,
        aws_secret_access_key=secret_key,
        endpoint_url=self.endpoint,
    )
    return client
Code Example #7
File: uploaders.py  Project: frankli0324/CTFd
def _get_s3_connection(self):
    access_key = get_app_config("AWS_ACCESS_KEY_ID")
    secret_key = get_app_config("AWS_SECRET_ACCESS_KEY")
    endpoint = get_app_config("AWS_S3_ENDPOINT_URL")
    client = boto3.client(
        "s3",
        config=Config(signature_version="s3v4"),
        aws_access_key_id=access_key,
        aws_secret_access_key=secret_key,
        endpoint_url=endpoint,
    )
    return client
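Note: the boto3 client returned by these _get_s3_connection variants behaves like any other S3 client. A hedged follow-up sketch, assuming get_app_config is importable from CTFd.utils; the bucket and key names are placeholders:

import boto3

from CTFd.utils import get_app_config

client = boto3.client(
    "s3",
    aws_access_key_id=get_app_config("AWS_ACCESS_KEY_ID"),
    aws_secret_access_key=get_app_config("AWS_SECRET_ACCESS_KEY"),
    endpoint_url=get_app_config("AWS_S3_ENDPOINT_URL"),
)

# Generate a time-limited download URL for a previously uploaded file.
url = client.generate_presigned_url(
    "get_object",
    Params={"Bucket": "ctfd-uploads", "Key": "uploads/abc123/handout.zip"},
    ExpiresIn=3600,
)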
Code Example #8
def get_s3_conn(app):
    access_key_id = utils.get_app_config('ACCESS_KEY_ID')
    secret_access_key = utils.get_app_config('SECRET_ACCESS_KEY')
    if access_key_id and secret_access_key:
        client = boto3.client('s3',
                              aws_access_key_id=access_key_id,
                              aws_secret_access_key=secret_access_key)
        bucket = utils.get_app_config('BUCKET')
        return client, bucket
    else:
        client = boto3.client('s3')
        bucket = utils.get_app_config('BUCKET')
        return client, bucket
Code Example #9
def sendmail(addr, text, subject):
    ctf_name = get_config("ctf_name")
    mailfrom_addr = get_config("mailfrom_addr") or get_app_config("MAILFROM_ADDR")
    mailfrom_addr = formataddr((ctf_name, mailfrom_addr))

    data = {
        "host": get_config("mail_server") or get_app_config("MAIL_SERVER"),
        "port": int(get_config("mail_port") or get_app_config("MAIL_PORT")),
    }
    username = get_config("mail_username") or get_app_config("MAIL_USERNAME")
    password = get_config("mail_password") or get_app_config("MAIL_PASSWORD")
    TLS = get_config("mail_tls") or get_app_config("MAIL_TLS")
    SSL = get_config("mail_ssl") or get_app_config("MAIL_SSL")
    auth = get_config("mail_useauth") or get_app_config("MAIL_USEAUTH")

    if username:
        data["username"] = username
    if password:
        data["password"] = password
    if TLS:
        data["TLS"] = TLS
    if SSL:
        data["SSL"] = SSL
    if auth:
        data["auth"] = auth

    try:
        smtp = get_smtp(**data)

        msg = EmailMessage()
        msg.set_content(text)

        msg["Subject"] = subject
        msg["From"] = mailfrom_addr
        msg["To"] = addr

        # Check whether we are using an admin-defined SMTP server
        custom_smtp = bool(get_config("mail_server"))

        # We should only consider the MAILSENDER_ADDR value on servers defined in config
        if custom_smtp:
            smtp.send_message(msg)
        else:
            mailsender_addr = get_app_config("MAILSENDER_ADDR")
            smtp.send_message(msg, from_addr=mailsender_addr)

        smtp.quit()
        return True, "Email sent"
    except smtplib.SMTPException as e:
        return False, str(e)
    except timeout:
        return False, "SMTP server connection timed out"
    except Exception as e:
        return False, str(e)
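Note: the MAIL_* fallbacks read above come from the Flask application config. A sketch of the corresponding config.py entries, using only key names that appear in the get_app_config() calls of this example; all values are placeholders:

# config.py (placeholder values)
MAILFROM_ADDR = "noreply@example.com"
MAIL_SERVER = "smtp.example.com"
MAIL_PORT = 587
MAIL_USEAUTH = True
MAIL_USERNAME = "noreply@example.com"
MAIL_PASSWORD = "changeme"
MAIL_TLS = True
MAIL_SSL = False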
Code Example #10
def update_check(force=False):
    """
    Makes a request to ctfd.io to check if there is a new version of CTFd available. The service is provided in return
    for users opting in to anonymous usage data collection. Users can opt-out of update checks by specifying
    UPDATE_CHECK = False in config.py

    :param force:
    :return:
    """
    # If UPDATE_CHECK is disabled don't check for updates at all.
    if app.config.get("UPDATE_CHECK") is False:
        return

    # Don't do an update check if not setup
    if is_setup() is False:
        return

    # Get when we should check for updates next.
    next_update_check = get_config("next_update_check") or 0

    # If we have passed our saved time or we are forcing we should check.
    update = (next_update_check < time.time()) or force

    if update:
        try:
            name = str(get_config("ctf_name") or "")
            params = {
                "ctf_id": sha256(name),
                "current": app.VERSION,
                "python_version_raw": sys.hexversion,
                "python_version": python_version(),
                "db_driver": db.session.bind.dialect.name,
                "challenge_count": Challenges.query.count(),
                "user_mode": get_config("user_mode"),
                "user_count": Users.query.count(),
                "team_count": Teams.query.count(),
                "theme": get_config("ctf_theme"),
                "upload_provider": get_app_config("UPLOAD_PROVIDER"),
                "channel": app.CHANNEL,
            }
            check = requests.get("https://versioning.ctfd.io/check",
                                 params=params,
                                 timeout=3).json()
        except requests.exceptions.RequestException:
            pass
        except ValueError:
            pass
        else:
            try:
                latest = check["resource"]["tag"]
                html_url = check["resource"]["html_url"]
                if StrictVersion(latest) > StrictVersion(app.VERSION):
                    set_config("version_latest", html_url)
                elif StrictVersion(latest) <= StrictVersion(app.VERSION):
                    set_config("version_latest", None)
                next_update_check_time = check["resource"].get(
                    "next", int(time.time() + 43200))
                set_config("next_update_check", next_update_check_time)
            except KeyError:
                set_config("version_latest", None)
Code Example #11
def update_check(force=False):
    """
    Makes a request to ctfd.io to check if there is a new version of CTFd available. The service is provided in return
    for users opting in to anonymous usage data collection. Users can opt-out of update checks by specifying
    UPDATE_CHECK = False in config.py

    :param force:
    :return:
    """
    # If UPDATE_CHECK is disabled don't check for updates at all.
    if app.config.get('UPDATE_CHECK') is False:
        return

    # Don't do an update check if not setup
    if is_setup() is False:
        return

    # Get when we should check for updates next.
    next_update_check = get_config('next_update_check') or 0

    # If we have passed our saved time or we are forcing we should check.
    update = (next_update_check < time.time()) or force

    if update:
        try:
            name = str(get_config('ctf_name') or '')
            params = {
                'ctf_id': sha256(name),
                'current': app.VERSION,
                'python_version_raw': sys.hexversion,
                'python_version': python_version(),
                'db_driver': db.session.bind.dialect.name,
                'challenge_count': Challenges.query.count(),
                'user_mode': get_config('user_mode'),
                'user_count': Users.query.count(),
                'team_count': Teams.query.count(),
                'theme': get_config('ctf_theme'),
                'upload_provider': get_app_config('UPLOAD_PROVIDER')
            }
            check = requests.post('https://versioning.ctfd.io/',
                                  json=params,
                                  timeout=0.1).json()
        except requests.exceptions.RequestException:
            pass
        except ValueError:
            pass
        else:
            try:
                latest = check['resource']['tag']
                html_url = check['resource']['html_url']
                if StrictVersion(latest) > StrictVersion(app.VERSION):
                    set_config('version_latest', html_url)
                elif StrictVersion(latest) <= StrictVersion(app.VERSION):
                    set_config('version_latest', None)
                next_update_check_time = check['resource'].get(
                    'next', int(time.time() + 43200))
                set_config('next_update_check', next_update_check_time)
            except KeyError:
                set_config('version_latest', None)
Code Example #12
def sendmail(addr, text, subject):
    ctf_name = get_config("ctf_name")
    mailfrom_addr = get_config("mailfrom_addr") or get_app_config("MAILFROM_ADDR")
    mailfrom_addr = "{} <{}>".format(ctf_name, mailfrom_addr)

    data = {
        "host": get_config("mail_server") or get_app_config("MAIL_SERVER"),
        "port": int(get_config("mail_port") or get_app_config("MAIL_PORT")),
    }
    username = get_config("mail_username") or get_app_config("MAIL_USERNAME")
    password = get_config("mail_password") or get_app_config("MAIL_PASSWORD")
    TLS = get_config("mail_tls") or get_app_config("MAIL_TLS")
    SSL = get_config("mail_ssl") or get_app_config("MAIL_SSL")
    auth = get_config("mail_useauth") or get_app_config("MAIL_USEAUTH")

    if username:
        data["username"] = username
    if password:
        data["password"] = password
    if TLS:
        data["TLS"] = TLS
    if SSL:
        data["SSL"] = SSL
    if auth:
        data["auth"] = auth

    try:
        smtp = get_smtp(**data)

        if six.PY2:
            msg = MIMEText(text)
        else:
            msg = EmailMessage()
            msg.set_content(text)

        msg["Subject"] = subject
        msg["From"] = mailfrom_addr
        msg["To"] = addr

        if six.PY2:
            smtp.sendmail(msg["From"], [msg["To"]], msg.as_string())
        else:
            smtp.send_message(msg)

        smtp.quit()
        return True, "Email sent"
    except smtplib.SMTPException as e:
        return False, str(e)
    except timeout:
        return False, "SMTP server connection timed out"
    except Exception as e:
        return False, str(e)
Code Example #13
def subscribe():
    @stream_with_context
    def gen():
        for event in current_app.events_manager.subscribe():
            yield str(event)

    enabled = get_app_config("SERVER_SENT_EVENTS")
    if enabled is False:
        return ("", 204)

    return Response(gen(), mimetype="text/event-stream")
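Note: the stream above is gated on the SERVER_SENT_EVENTS application config key; when it is False, subscribers get an empty 204 response instead. A one-line config.py sketch (key name taken from the example, value is the opt-out setting):

# config.py
SERVER_SENT_EVENTS = False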
Code Example #14
File: smtp.py  Project: shareef12/CTFd
def sendmail(addr, text):
    ctf_name = get_config('ctf_name')
    mailfrom_addr = get_config('mailfrom_addr') or get_app_config('MAILFROM_ADDR')
    data = {
        'host': get_config('mail_server'),
        'port': int(get_config('mail_port'))
    }
    username = get_config('mail_username') or get_app_config('MAIL_USERNAME')
    password = get_config('mail_password') or get_app_config('MAIL_PASSWORD')
    TLS = get_config('mail_tls') or get_app_config('MAIL_TLS')
    SSL = get_config('mail_ssl') or get_app_config('MAIL_SSL')
    # Enable SMTP auth only when a username is configured; initialising the
    # flag here avoids a NameError in the check below when it is unset.
    auth = bool(username)

    if username:
        data['username'] = username
    if password:
        data['password'] = password
    if TLS:
        data['TLS'] = TLS
    if SSL:
        data['SSL'] = SSL
    if auth:
        data['auth'] = auth

    try:
        smtp = get_smtp(**data)
        msg = MIMEText(text)
        msg['Subject'] = "Message from {0}".format(ctf_name)
        msg['From'] = mailfrom_addr
        msg['To'] = addr

        smtp.sendmail(msg['From'], [msg['To']], msg.as_string())
        smtp.quit()
        return True, "Email sent"
    except smtplib.SMTPException as e:
        return False, str(e)
    except timeout:
        return False, "SMTP server connection timed out"
    except Exception as e:
        return False, str(e)
Code Example #15
File: mailgun.py  Project: dsegna/CTFd-OSINT
def sendmail(addr, text):
    ctf_name = get_config('ctf_name')
    mailfrom_addr = get_config('mailfrom_addr') or get_app_config('MAILFROM_ADDR')
    mailgun_base_url = get_config('mailgun_base_url') or get_app_config('MAILGUN_BASE_URL')
    mailgun_api_key = get_config('mailgun_api_key') or get_app_config('MAILGUN_API_KEY')
    try:
        r = requests.post(
            mailgun_base_url + '/messages',
            auth=("api", mailgun_api_key),
            data={"from": "{} Admin <{}>".format(ctf_name, mailfrom_addr),
                  "to": [addr],
                  "subject": "Message from {0}".format(ctf_name),
                  "text": text},
            timeout=1.0
        )
    except requests.RequestException as e:
        return False, "{error} exception occurred while handling your request".format(error=type(e).__name__)

    if r.status_code == 200:
        return True, "Email sent"
    else:
        return False, "Mailgun settings are incorrect"
Code Example #16
def sendmail(addr, text):
    ctf_name = get_config("ctf_name")
    mailfrom_addr = get_config("mailfrom_addr") or get_app_config("MAILFROM_ADDR")
    mailgun_base_url = get_config("mailgun_base_url") or get_app_config(
        "MAILGUN_BASE_URL"
    )
    mailgun_api_key = get_config("mailgun_api_key") or get_app_config("MAILGUN_API_KEY")
    try:
        print("POSTing to Mailgun")
        maildata = {
            "from": "{} Admin <{}>".format(ctf_name, mailfrom_addr),
            "to": [addr],
            "subject": "Message from {0}".format(ctf_name),
            "text": text,
        }
        print(maildata)
        print(mailgun_api_key)
        print(mailgun_base_url)
        r = requests.post(
            mailgun_base_url + "/messages",
            auth=("api", mailgun_api_key),
            data=maildata
        )
    except requests.RequestException as e:
        print("Error communicating with Mailgun", e)
        return (
            False,
            "{error} exception occurred while handling your request".format(
                error=type(e).__name__
            ),
        )

    if r.status_code == 200:
        print("Email sent")
        return True, "Email sent"
    else:
        print("Problem sending email")
        return False, "Mailgun settings are incorrect"
Code Example #17
File: __init__.py  Project: wicakhendrik/CTFd
def export_ctf():
    # TODO: For some unknown reason dataset is only able to see alembic_version during tests.
    # Even using a real sqlite database. This makes this test impossible to pass in sqlite.
    db = dataset.connect(get_app_config("SQLALCHEMY_DATABASE_URI"))

    # Backup database
    backup = tempfile.NamedTemporaryFile()

    backup_zip = zipfile.ZipFile(backup, "w")

    tables = db.tables
    for table in tables:
        result = db[table].all()
        result_file = six.BytesIO()
        datafreeze.freeze(result, format="ctfd", fileobj=result_file)
        result_file.seek(0)
        backup_zip.writestr("db/{}.json".format(table), result_file.read())

    # Guarantee that alembic_version is saved into the export
    if "alembic_version" not in tables:
        result = {
            "count": 1,
            "results": [{
                "version_num": get_current_revision()
            }],
            "meta": {},
        }
        result_file = six.BytesIO()
        json.dump(result, result_file)
        result_file.seek(0)
        backup_zip.writestr("db/alembic_version.json", result_file.read())

    # Backup uploads
    uploader = get_uploader()
    uploader.sync()

    upload_folder = os.path.join(os.path.normpath(app.root_path),
                                 app.config.get("UPLOAD_FOLDER"))
    for root, dirs, files in os.walk(upload_folder):
        for file in files:
            parent_dir = os.path.basename(root)
            backup_zip.write(
                os.path.join(root, file),
                arcname=os.path.join("uploads", parent_dir, file),
            )

    backup_zip.close()
    backup.seek(0)
    return backup
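Note: export_ctf() returns an open NamedTemporaryFile already seeked back to the start of the zip archive. A hypothetical caller that persists the export to disk; the destination path is a placeholder:

backup = export_ctf()
with open("/tmp/ctfd-export.zip", "wb") as out:
    out.write(backup.read())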
Code Example #18
    def reddit_login():
        endpoint = (get_app_config("REDDIT_AUTHORIZATION_ENDPOINT")
                    or get_config("reddit_authorization_endpoint")
                    or "https://ssl.reddit.com/api/v1/authorize")

        client_id = get_app_config("REDDIT_CLIENT_ID") or get_config(
            "reddit_client_id")
        callback_url = get_app_config("REDDIT_CALLBACK_URL") or get_config(
            "reddit_callback_url")

        if client_id is None:
            error_for(
                endpoint="reddit.login",
                message="Reddit OAuth Settings not configured. "
                "Ask your CTF administrator to configure Reddit integration.",
            )
            return redirect(url_for("auth.login"))

        redirect_url = "{endpoint}?client_id={client_id}&response_type=code&state={state}&redirect_uri={callback_url}&duration=temporary&scope=identity".format(
            endpoint=endpoint,
            client_id=client_id,
            state=session["nonce"],
            callback_url=callback_url)
        return redirect(redirect_url)
Code Example #19
File: auth.py  Project: Edgaraaa/DisPlatform
def oauth_redirect():
    oauth_code = request.args.get("code")
    state = request.args.get("state")
    if session["nonce"] != state:
        log("logins", "[{date}] {ip} - OAuth State validation mismatch")
        error_for(endpoint="auth.login",
                  message="OAuth State validation mismatch.")
        return redirect(url_for("auth.login"))

    if oauth_code:
        url = (get_app_config("OAUTH_TOKEN_ENDPOINT")
               or get_config("oauth_token_endpoint")
               or "https://auth.majorleaguecyber.org/oauth/token")

        client_id = get_app_config("OAUTH_CLIENT_ID") or get_config(
            "oauth_client_id")
        client_secret = get_app_config("OAUTH_CLIENT_SECRET") or get_config(
            "oauth_client_secret")
        headers = {"content-type": "application/x-www-form-urlencoded"}
        data = {
            "code": oauth_code,
            "client_id": client_id,
            "client_secret": client_secret,
            "grant_type": "authorization_code",
        }
        token_request = requests.post(url, data=data, headers=headers)

        if token_request.status_code == requests.codes.ok:
            token = token_request.json()["access_token"]
            user_url = (get_app_config("OAUTH_API_ENDPOINT")
                        or get_config("oauth_api_endpoint")
                        or "https://api.majorleaguecyber.org/user")

            headers = {
                "Authorization": "Bearer " + str(token),
                "Content-type": "application/json",
            }
            api_data = requests.get(url=user_url, headers=headers).json()

            user_id = api_data["id"]
            user_name = api_data["name"]
            user_email = api_data["email"]

            user = Users.query.filter_by(email=user_email).first()
            if user is None:
                # Check if we are allowing registration before creating users
                if registration_visible():
                    user = Users(
                        name=user_name,
                        email=user_email,
                        oauth_id=user_id,
                        verified=True,
                    )
                    db.session.add(user)
                    db.session.commit()
                else:
                    log("logins",
                        "[{date}] {ip} - Public registration via MLC blocked")
                    error_for(
                        endpoint="auth.login",
                        message=
                        "Public registration is disabled. Please try again later.",
                    )
                    return redirect(url_for("auth.login"))

            if get_config("user_mode") == TEAMS_MODE:
                team_id = api_data["team"]["id"]
                team_name = api_data["team"]["name"]

                team = Teams.query.filter_by(oauth_id=team_id).first()
                if team is None:
                    team = Teams(name=team_name,
                                 oauth_id=team_id,
                                 captain_id=user.id)
                    db.session.add(team)
                    db.session.commit()

                team.members.append(user)
                db.session.commit()

            if user.oauth_id is None:
                user.oauth_id = user_id
                user.verified = True
                db.session.commit()

            login_user(user)

            return redirect(url_for("challenges.listing"))
        else:
            log("logins", "[{date}] {ip} - OAuth token retrieval failure")
            error_for(endpoint="auth.login",
                      message="OAuth token retrieval failure.")
            return redirect(url_for("auth.login"))
    else:
        log("logins", "[{date}] {ip} - Received redirect without OAuth code")
        error_for(endpoint="auth.login",
                  message="Received redirect without OAuth code.")
        return redirect(url_for("auth.login"))
Code Example #20
File: auth.py  Project: shareef12/CTFd
def oauth_redirect():
    oauth_code = request.args.get('code')
    state = request.args.get('state')
    if session['nonce'] != state:
        log('logins', "[{date}] {ip} - OAuth State validation mismatch")
        error_for(endpoint='auth.login', message='OAuth State validation mismatch.')
        return redirect(url_for('auth.login'))

    if oauth_code:
        url = get_app_config('OAUTH_TOKEN_ENDPOINT') \
            or get_config('oauth_token_endpoint') \
            or 'https://auth.majorleaguecyber.org/oauth/token'

        client_id = get_app_config('OAUTH_CLIENT_ID') or get_config('oauth_client_id')
        client_secret = get_app_config('OAUTH_CLIENT_SECRET') or get_config('oauth_client_secret')
        headers = {
            'content-type': 'application/x-www-form-urlencoded'
        }
        data = {
            'code': oauth_code,
            'client_id': client_id,
            'client_secret': client_secret,
            'grant_type': 'authorization_code'
        }
        token_request = requests.post(url, data=data, headers=headers)

        if token_request.status_code == requests.codes.ok:
            token = token_request.json()['access_token']
            user_url = get_app_config('OAUTH_API_ENDPOINT') \
                or get_config('oauth_api_endpoint') \
                or 'http://api.majorleaguecyber.org/user'

            headers = {
                'Authorization': 'Bearer ' + str(token),
                'Content-type': 'application/json'
            }
            api_data = requests.get(url=user_url, headers=headers).json()

            user_id = api_data['id']
            user_name = api_data['name']
            user_email = api_data['email']

            user = Users.query.filter_by(email=user_email).first()
            if user is None:
                user = Users(
                    name=user_name,
                    email=user_email,
                    oauth_id=user_id,
                    verified=True
                )
                db.session.add(user)
                db.session.commit()

            if get_config('user_mode') == TEAMS_MODE:
                team_id = api_data['team']['id']
                team_name = api_data['team']['name']

                team = Teams.query.filter_by(oauth_id=team_id).first()
                if team is None:
                    team = Teams(
                        name=team_name,
                        oauth_id=team_id
                    )
                    db.session.add(team)
                    db.session.commit()

                team.members.append(user)
                db.session.commit()

            login_user(user)

            return redirect(url_for('challenges.listing'))
        else:
            log('logins', "[{date}] {ip} - OAuth token retrieval failure")
            error_for(
                endpoint='auth.login',
                message='OAuth token retrieval failure.'
            )
            return redirect(url_for('auth.login'))
    else:
        log('logins', "[{date}] {ip} - Received redirect without OAuth code")
        error_for(
            endpoint='auth.login',
            message='Received redirect without OAuth code.'
        )
        return redirect(url_for('auth.login'))
Code Example #21
File: integrations.py  Project: DevLuce/CTFd
def mlc():
    admin_config = get_config("oauth_client_id") and get_config(
        "oauth_client_secret")
    main_config = get_app_config("OAUTH_CLIENT_ID") and get_app_config(
        "OAUTH_CLIENT_SECRET")
    return admin_config or main_config
Code Example #22
File: __init__.py  Project: shareef12/CTFd
def import_ctf(backup, erase=True):
    if not zipfile.is_zipfile(backup):
        raise zipfile.BadZipfile

    if erase:
        drop_database()
        create_database()
        # We explicitly do not want to upgrade or stamp here.
        # The import will have this information.

    side_db = dataset.connect(get_app_config('SQLALCHEMY_DATABASE_URI'))
    sqlite = get_app_config('SQLALCHEMY_DATABASE_URI').startswith('sqlite')
    postgres = get_app_config('SQLALCHEMY_DATABASE_URI').startswith('postgres')

    backup = zipfile.ZipFile(backup)

    members = backup.namelist()
    max_content_length = get_app_config('MAX_CONTENT_LENGTH')
    for f in members:
        if f.startswith('/') or '..' in f:
            # Abort on malicious zip files
            raise zipfile.BadZipfile
        info = backup.getinfo(f)
        if max_content_length:
            if info.file_size > max_content_length:
                raise zipfile.LargeZipFile

    first = [
        'db/teams.json',
        'db/users.json',
        'db/challenges.json',
        'db/dynamic_challenge.json',
        'db/flags.json',
        'db/hints.json',
        'db/unlocks.json',
        'db/awards.json',
        'db/tags.json',
        'db/submissions.json',
        'db/solves.json',
        'db/files.json',
        'db/notifications.json',
        'db/pages.json',
        'db/tracking.json',
        'db/config.json',
    ]

    for item in first:
        if item in members:
            members.remove(item)

    members = first + members

    alembic_version = json.loads(
        backup.open(
            'db/alembic_version.json').read())["results"][0]["version_num"]
    upgrade(revision=alembic_version)
    members.remove('db/alembic_version.json')

    for member in members:
        if member.startswith('db/'):
            table_name = member[3:-5]

            try:
                # Try to open a file but skip if it doesn't exist.
                data = backup.open(member).read()
            except KeyError:
                continue

            if data:
                table = side_db[table_name]

                saved = json.loads(data)
                for entry in saved['results']:
                    # This is a hack to get SQLite to properly accept datetime values from dataset
                    # See Issue #246
                    if sqlite:
                        for k, v in entry.items():
                            if isinstance(v, six.string_types):
                                match = re.match(
                                    r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d",
                                    v)
                                if match:
                                    entry[k] = datetime.datetime.strptime(
                                        v, '%Y-%m-%dT%H:%M:%S.%f')
                                    continue
                                match = re.match(
                                    r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}", v)
                                if match:
                                    entry[k] = datetime.datetime.strptime(
                                        v, '%Y-%m-%dT%H:%M:%S')
                                    continue
                    table.insert(entry)
                    db.session.commit()
                if postgres:
                    # TODO: This should be sanitized even though exports are basically SQL dumps
                    # Databases are so hard
                    # https://stackoverflow.com/a/37972960
                    side_db.engine.execute(
                        "SELECT setval(pg_get_serial_sequence('{table_name}', 'id'), coalesce(max(id)+1,1), false) FROM {table_name}"
                        .format(table_name=table_name))

    # Extracting files
    files = [f for f in backup.namelist() if f.startswith('uploads/')]
    uploader = get_uploader()
    for f in files:
        filename = f.split(os.sep, 1)

        if len(filename) < 2:  # just an empty uploads directory (e.g. uploads/)
            continue

        filename = filename[1]  # Get the second entry in the list (the actual filename)
        source = backup.open(f)
        uploader.store(fileobj=source, filename=filename)

    cache.clear()
Code Example #23
File: oauth2.py  Project: yakovk/CTFd
def load(app):
    ########################
    # Plugin Configuration #
    ########################
    authentication_url_prefix = "/auth"
    oauth_client_id = utils.get_app_config('OAUTHLOGIN_CLIENT_ID')
    oauth_client_secret = utils.get_app_config('OAUTHLOGIN_CLIENT_SECRET')
    oauth_provider = utils.get_app_config('OAUTHLOGIN_PROVIDER')
    create_missing_user = utils.get_app_config(
        'OAUTHLOGIN_CREATE_MISSING_USER')

    ##################
    # User Functions #
    ##################
    def retrieve_user_from_database(username):
        user = Users.query.filter_by(email=username).first()
        if user is not None:
            log('logins',
                "[{date}] {ip} - " + username + " - OAuth2 bridged user found")
            return user

    def create_user(username, displayName):
        with app.app_context():
            log(
                'logins', "[{date}] {ip} - " + username +
                " - No OAuth2 bridged user found, creating user")
            user = Users(email=username,
                         name=displayName.strip(),
                         verified=True)
            db.session.add(user)
            db.session.commit()
            return user

    def create_or_get_user(username, displayName):
        user = retrieve_user_from_database(username)
        if user is not None:
            return user
        if create_missing_user:
            return create_user(username, displayName)
        else:
            log(
                'logins', "[{date}] {ip} - " + username +
                " - No OAuth2 bridged user found and not configured to create missing users"
            )
            return None

    ##########################
    # Provider Configuration #
    ##########################
    provider_blueprints = {
        'azure':
        lambda: flask_dance.contrib.azure.make_azure_blueprint(
            login_url='/azure',
            client_id=oauth_client_id,
            client_secret=oauth_client_secret,
            redirect_url=authentication_url_prefix + "/azure/confirm"),
        'github':
        lambda: flask_dance.contrib.github.make_github_blueprint(
            login_url='/github',
            client_id=oauth_client_id,
            client_secret=oauth_client_secret,
            redirect_url=authentication_url_prefix + "/github/confirm")
    }

    def get_azure_user():
        resp = flask_dance.contrib.azure.azure.get("/v1.0/me")
        print(" * Azure.Get(/v1.0/me): %s" % resp.text)
        user_info = resp.json()
        return create_or_get_user(username=user_info["userPrincipalName"],
                                  displayName=user_info["displayName"])

    def get_github_user():
        user_info = flask_dance.contrib.github.github.get("/user").json()
        return create_or_get_user(username=user_info["email"],
                                  displayName=user_info["name"])

    provider_users = {
        'azure': lambda: get_azure_user(),
        'github': lambda: get_github_user()
    }

    if oauth_provider not in provider_blueprints:
        print(
            '** Skip loading CTFd-OAuth2 because of the unknown or unsupported OAuth provider **'
        )
        return

    provider_blueprint = provider_blueprints[oauth_provider]()  # Resolved lambda

    #######################
    # Blueprint Functions #
    #######################
    @provider_blueprint.route('/<string:auth_provider>/confirm',
                              methods=['GET'])
    def confirm_auth_provider(auth_provider):
        if auth_provider not in provider_users:
            return redirect('/')

        provider_user = provider_users[oauth_provider]()  # Resolved lambda
        if provider_user is not None:
            session.regenerate()
            login_user(provider_user)
            db.session.close()
        return redirect('/')

    app.register_blueprint(provider_blueprint,
                           url_prefix=authentication_url_prefix)

    ###############################
    # Application Reconfiguration #
    ###############################
    # ('', 204) is "No Content" code
    set_config('registration_visibility', False)
    app.view_functions['auth.login'] = lambda: redirect(
        authentication_url_prefix + "/" + oauth_provider)
    app.view_functions['auth.register'] = lambda: ('', 204)
    app.view_functions['auth.reset_password'] = lambda: ('', 204)
    app.view_functions['auth.confirm'] = lambda: ('', 204)
Code Example #24
File: __init__.py  Project: mrigank-9594/srhctf_
def import_ctf(backup, erase=True):
    if not zipfile.is_zipfile(backup):
        raise zipfile.BadZipfile

    if erase:
        drop_database()
        create_database()
        # We explicitly do not want to upgrade or stamp here.
        # The import will have this information.

    side_db = dataset.connect(get_app_config('SQLALCHEMY_DATABASE_URI'))
    sqlite = get_app_config('SQLALCHEMY_DATABASE_URI').startswith('sqlite')
    postgres = get_app_config('SQLALCHEMY_DATABASE_URI').startswith('postgres')

    backup = zipfile.ZipFile(backup)

    members = backup.namelist()
    max_content_length = get_app_config('MAX_CONTENT_LENGTH')
    for f in members:
        if f.startswith('/') or '..' in f:
            # Abort on malicious zip files
            raise zipfile.BadZipfile
        info = backup.getinfo(f)
        if max_content_length:
            if info.file_size > max_content_length:
                raise zipfile.LargeZipFile

    first = [
        'db/teams.json',
        'db/users.json',
        'db/challenges.json',
        'db/dynamic_challenge.json',

        'db/flags.json',
        'db/hints.json',
        'db/unlocks.json',
        'db/awards.json',
        'db/tags.json',

        'db/submissions.json',
        'db/solves.json',

        'db/files.json',

        'db/notifications.json',
        'db/pages.json',

        'db/tracking.json',
        'db/config.json',
    ]

    for item in first:
        if item in members:
            members.remove(item)

    members = first + members

    alembic_version = json.loads(backup.open('db/alembic_version.json').read())["results"][0]["version_num"]
    upgrade(revision=alembic_version)

    # Create tables created by plugins
    try:
        app.db.create_all()
    except OperationalError as e:
        if not postgres:
            raise e
        else:
            print("Allowing error during app.db.create_all() due to Postgres")

    members.remove('db/alembic_version.json')

    for member in members:
        if member.startswith('db/'):
            table_name = member[3:-5]

            try:
                # Try to open a file but skip if it doesn't exist.
                data = backup.open(member).read()
            except KeyError:
                continue

            if data:
                table = side_db[table_name]

                saved = json.loads(data)
                for entry in saved['results']:
                    # This is a hack to get SQLite to properly accept datetime values from dataset
                    # See Issue #246
                    if sqlite:
                        for k, v in entry.items():
                            if isinstance(v, six.string_types):
                                match = re.match(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d", v)
                                if match:
                                    entry[k] = datetime.datetime.strptime(v, '%Y-%m-%dT%H:%M:%S.%f')
                                    continue
                                match = re.match(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}", v)
                                if match:
                                    entry[k] = datetime.datetime.strptime(v, '%Y-%m-%dT%H:%M:%S')
                                    continue
                    # From v2.0.0 to v2.1.0 requirements could have been a string or JSON because of a SQLAlchemy issue
                    # This is a hack to ensure we can still accept older exports. See #867
                    if member in ('db/challenges.json', 'db/hints.json', 'db/awards.json'):
                        requirements = entry.get('requirements')
                        if requirements and isinstance(requirements, six.string_types):
                            entry['requirements'] = json.loads(requirements)

                    try:
                        table.insert(entry)
                    except ProgrammingError:
                        # MariaDB does not like JSON objects and prefers strings because it internally
                        # represents JSON with LONGTEXT.
                        # See Issue #973
                        requirements = entry.get('requirements')
                        if requirements and isinstance(requirements, dict):
                            entry['requirements'] = json.dumps(requirements)
                        table.insert(entry)

                    db.session.commit()
                if postgres:
                    # This command is to set the next primary key ID for the re-inserted tables in Postgres. However,
                    # this command is very difficult to translate into SQLAlchemy code. Because Postgres is not
                    # officially supported, no major work will go into this functionality.
                    # https://stackoverflow.com/a/37972960
                    if '"' not in table_name and '\'' not in table_name:
                        query = "SELECT setval(pg_get_serial_sequence('{table_name}', 'id'), coalesce(max(id)+1,1), false) FROM \"{table_name}\"".format(  # nosec
                            table_name=table_name
                        )
                        side_db.engine.execute(query)
                    else:
                        raise Exception('Table name {table_name} contains quotes'.format(table_name=table_name))

    # Extracting files
    files = [f for f in backup.namelist() if f.startswith('uploads/')]
    uploader = get_uploader()
    for f in files:
        filename = f.split(os.sep, 1)

        if len(filename) < 2:  # just an empty uploads directory (e.g. uploads/)
            continue

        filename = filename[1]  # Get the second entry in the list (the actual filename)
        source = backup.open(f)
        uploader.store(fileobj=source, filename=filename)

    # Alembic sqlite support is lacking so we should just create_all anyway
    try:
        upgrade(revision='head')
    except (CommandError, RuntimeError, SystemExit):
        app.db.create_all()
        stamp()

    # Invalidate all cached data
    cache.clear()

    # Set default theme in case the current instance or the import does not provide it
    set_config('ctf_theme', 'core')
Code Example #25
File: __init__.py  Project: wicakhendrik/CTFd
def import_ctf(backup, erase=True):
    if not zipfile.is_zipfile(backup):
        raise zipfile.BadZipfile

    backup = zipfile.ZipFile(backup)

    members = backup.namelist()
    max_content_length = get_app_config("MAX_CONTENT_LENGTH")
    for f in members:
        if f.startswith("/") or ".." in f:
            # Abort on malicious zip files
            raise zipfile.BadZipfile
        info = backup.getinfo(f)
        if max_content_length:
            if info.file_size > max_content_length:
                raise zipfile.LargeZipFile

    try:
        alembic_version = json.loads(
            backup.open("db/alembic_version.json").read())
        alembic_version = alembic_version["results"][0]["version_num"]
    except Exception:
        raise Exception(
            "Could not determine appropriate database version. This backup cannot be automatically imported."
        )

    # Check if the alembic version is from CTFd 1.x
    if alembic_version in (
            "1ec4a28fe0ff",
            "2539d8b5082e",
            "7e9efd084c5a",
            "87733981ca0e",
            "a4e30c94c360",
            "c12d2a1b0926",
            "c7225db614c1",
            "cb3cfcc47e2f",
            "cbf5620f8e15",
            "d5a224bf5862",
            "d6514ec92738",
            "dab615389702",
            "e62fd69bd417",
    ):
        raise Exception(
            "The version of CTFd that this backup is from is too old to be automatically imported."
        )

    if erase:
        drop_database()
        create_database()
        # We explicitly do not want to upgrade or stamp here.
        # The import will have this information.

    side_db = dataset.connect(get_app_config("SQLALCHEMY_DATABASE_URI"))
    sqlite = get_app_config("SQLALCHEMY_DATABASE_URI").startswith("sqlite")
    postgres = get_app_config("SQLALCHEMY_DATABASE_URI").startswith("postgres")

    try:
        if postgres:
            side_db.query("SET session_replication_role=replica;")
        else:
            side_db.query("SET FOREIGN_KEY_CHECKS=0;")
    except Exception:
        print("Failed to disable foreign key checks. Continuing.")

    first = [
        "db/teams.json",
        "db/users.json",
        "db/challenges.json",
        "db/dynamic_challenge.json",
        "db/flags.json",
        "db/hints.json",
        "db/unlocks.json",
        "db/awards.json",
        "db/tags.json",
        "db/submissions.json",
        "db/solves.json",
        "db/files.json",
        "db/notifications.json",
        "db/pages.json",
        "db/tracking.json",
        "db/config.json",
    ]

    for item in first:
        if item in members:
            members.remove(item)

    members = first + members

    upgrade(revision=alembic_version)

    # Create tables created by plugins
    try:
        app.db.create_all()
    except OperationalError as e:
        if not postgres:
            raise e
        else:
            print("Allowing error during app.db.create_all() due to Postgres")

    members.remove("db/alembic_version.json")

    for member in members:
        if member.startswith("db/"):
            table_name = member[3:-5]

            try:
                # Try to open a file but skip if it doesn't exist.
                data = backup.open(member).read()
            except KeyError:
                continue

            if data:
                table = side_db[table_name]

                saved = json.loads(data)
                for entry in saved["results"]:
                    # This is a hack to get SQLite to properly accept datetime values from dataset
                    # See Issue #246
                    if sqlite:
                        for k, v in entry.items():
                            if isinstance(v, six.string_types):
                                match = re.match(
                                    r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d",
                                    v)
                                if match:
                                    entry[k] = datetime.datetime.strptime(
                                        v, "%Y-%m-%dT%H:%M:%S.%f")
                                    continue
                                match = re.match(
                                    r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}", v)
                                if match:
                                    entry[k] = datetime.datetime.strptime(
                                        v, "%Y-%m-%dT%H:%M:%S")
                                    continue
                    # From v2.0.0 to v2.1.0 requirements could have been a string or JSON because of a SQLAlchemy issue
                    # This is a hack to ensure we can still accept older exports. See #867
                    if member in (
                            "db/challenges.json",
                            "db/hints.json",
                            "db/awards.json",
                    ):
                        requirements = entry.get("requirements")
                        if requirements and isinstance(requirements,
                                                       six.string_types):
                            entry["requirements"] = json.loads(requirements)

                    try:
                        table.insert(entry)
                    except ProgrammingError:
                        # MariaDB does not like JSON objects and prefers strings because it internally
                        # represents JSON with LONGTEXT.
                        # See Issue #973
                        requirements = entry.get("requirements")
                        if requirements and isinstance(requirements, dict):
                            entry["requirements"] = json.dumps(requirements)
                        table.insert(entry)

                    db.session.commit()
                if postgres:
                    # This command is to set the next primary key ID for the re-inserted tables in Postgres. However,
                    # this command is very difficult to translate into SQLAlchemy code. Because Postgres is not
                    # officially supported, no major work will go into this functionality.
                    # https://stackoverflow.com/a/37972960
                    if '"' not in table_name and "'" not in table_name:
                        query = "SELECT setval(pg_get_serial_sequence('{table_name}', 'id'), coalesce(max(id)+1,1), false) FROM \"{table_name}\"".format(  # nosec
                            table_name=table_name)
                        side_db.engine.execute(query)
                    else:
                        raise Exception(
                            "Table name {table_name} contains quotes".format(
                                table_name=table_name))

    # Extracting files
    files = [f for f in backup.namelist() if f.startswith("uploads/")]
    uploader = get_uploader()
    for f in files:
        filename = f.split(os.sep, 1)

        if len(filename) < 2:  # just an empty uploads directory (e.g. uploads/)
            continue

        filename = filename[1]  # Get the second entry in the list (the actual filename)
        source = backup.open(f)
        uploader.store(fileobj=source, filename=filename)

    # Alembic sqlite support is lacking so we should just create_all anyway
    try:
        upgrade(revision="head")
    except (OperationalError, CommandError, RuntimeError, SystemExit,
            Exception):
        app.db.create_all()
        stamp_latest_revision()

    try:
        if postgres:
            side_db.query("SET session_replication_role=DEFAULT;")
        else:
            side_db.query("SET FOREIGN_KEY_CHECKS=1;")
    except Exception:
        print("Failed to enable foreign key checks. Continuing.")

    # Invalidate all cached data
    cache.clear()

    # Set default theme in case the current instance or the import does not provide it
    set_config("ctf_theme", "core")
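Note: a hypothetical invocation of the importer above; the path is a placeholder, and erase=True drops and recreates the current database before the backup is loaded:

import_ctf("/tmp/ctfd-export.zip", erase=True)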
Code Example #26
def register():
    errors = get_errors()
    if request.method == "POST" and get_app_config('REGISTRATIONS_ENABLED',
                                                   True):
        name = request.form.get("name", "").strip()
        email_address = request.form.get("email", "").strip().lower()
        password = request.form.get("password", "").strip()

        name_len = len(name) == 0
        names = Users.query.add_columns("name",
                                        "id").filter_by(name=name).first()
        emails = (Users.query.add_columns(
            "email", "id").filter_by(email=email_address).first())
        pass_short = len(password) == 0
        pass_long = len(password) > 128
        valid_email = validators.validate_email(email_address)
        team_name_email_check = validators.validate_email(name)

        if not valid_email:
            errors.append("Please enter a valid email address")
        if email.check_email_is_whitelisted(email_address) is False:
            errors.append(
                "Only email addresses under {domains} may register".format(
                    domains=get_config("domain_whitelist")))
        if names:
            errors.append("That user name is already taken")
        if team_name_email_check is True:
            errors.append("Your user name cannot be an email address")
        if emails:
            errors.append("That email has already been used")
        if pass_short:
            errors.append("Pick a longer password")
        if pass_long:
            errors.append("Pick a shorter password")
        if name_len:
            errors.append("Pick a longer user name")

        if len(errors) > 0:
            return render_template(
                "register.html",
                errors=errors,
                name=request.form["name"],
                email=request.form["email"],
                password=request.form["password"],
            )
        else:
            with app.app_context():
                user = Users(name=name, email=email_address, password=password)
                db.session.add(user)
                db.session.commit()
                db.session.flush()

                login_user(user)

                if config.can_send_mail() and get_config(
                        "verify_emails"
                ):  # Confirming users is enabled and we can send email.
                    log(
                        "registrations",
                        format=
                        "[{date}] {ip} - {name} registered (UNCONFIRMED) with {email}",
                    )
                    email.verify_email_address(user.email)
                    db.session.close()
                    return redirect(url_for("auth.confirm"))
                else:  # Don't care about confirming users
                    if (
                            config.can_send_mail()
                    ):  # We want to notify the user that they have registered.
                        email.successful_registration_notification(user.email)

        log("registrations", "[{date}] {ip} - {name} registered with {email}")
        db.session.close()

        if is_teams_mode():
            return redirect(url_for("teams.private"))

        return redirect(url_for("challenges.listing"))
    else:
        return render_template("register.html", errors=errors)
Code Example #27
File: auth.py  Project: Gu-f/CTFd_chinese_CN
def oauth_redirect():
    oauth_code = request.args.get("code")
    state = request.args.get("state")
    if session["nonce"] != state:
        log("logins", "[{date}] {ip} - OAuth State validation mismatch")
        error_for(endpoint="auth.login",
                  message="OAuth State validation mismatch.")
        return redirect(url_for("auth.login"))

    if oauth_code:
        url = (get_app_config("OAUTH_TOKEN_ENDPOINT")
               or get_config("oauth_token_endpoint")
               or "https://auth.majorleaguecyber.org/oauth/token")

        client_id = get_app_config("OAUTH_CLIENT_ID") or get_config(
            "oauth_client_id")
        client_secret = get_app_config("OAUTH_CLIENT_SECRET") or get_config(
            "oauth_client_secret")
        headers = {"content-type": "application/x-www-form-urlencoded"}
        data = {
            "code": oauth_code,
            "client_id": client_id,
            "client_secret": client_secret,
            "grant_type": "authorization_code",
        }
        token_request = requests.post(url, data=data, headers=headers)

        if token_request.status_code == requests.codes.ok:
            token = token_request.json()["access_token"]
            user_url = (get_app_config("OAUTH_API_ENDPOINT")
                        or get_config("oauth_api_endpoint")
                        or "https://api.majorleaguecyber.org/user")

            headers = {
                "Authorization": "Bearer " + str(token),
                "Content-type": "application/json",
            }
            api_data = requests.get(url=user_url, headers=headers).json()

            user_id = api_data["id"]
            user_name = api_data["name"]
            user_email = api_data["email"]

            user = Users.query.filter_by(email=user_email).first()
            if user is None:
                # Check if we are allowing registration before creating users
                if registration_visible() or mlc_registration():
                    user = Users(
                        name=user_name,
                        email=user_email,
                        oauth_id=user_id,
                        verified=True,
                    )
                    db.session.add(user)
                    db.session.commit()
                else:
                    log("logins",
                        "[{date}] {ip} - Public registration via MLC blocked")
                    error_for(
                        endpoint="auth.login",
                        message="Public registration is disabled. Please try again later.",
                    )
                    return redirect(url_for("auth.login"))

            if get_config("user_mode") == TEAMS_MODE:
                team_id = api_data["team"]["id"]
                team_name = api_data["team"]["name"]

                team = Teams.query.filter_by(oauth_id=team_id).first()
                if team is None:
                    num_teams_limit = int(get_config("num_teams", default=0))
                    num_teams = Teams.query.filter_by(banned=False,
                                                      hidden=False).count()
                    if num_teams_limit and num_teams >= num_teams_limit:
                        abort(
                            403,
                            description=f"Reached the maximum number of teams ({num_teams_limit}). Please join an existing team.",
                        )

                    team = Teams(name=team_name,
                                 oauth_id=team_id,
                                 captain_id=user.id)
                    db.session.add(team)
                    db.session.commit()
                    clear_team_session(team_id=team.id)

                team_size_limit = get_config("team_size", default=0)
                if team_size_limit and len(team.members) >= team_size_limit:
                    plural = "" if team_size_limit == 1 else "s"
                    size_error = "Teams are limited to {limit} member{plural}.".format(
                        limit=team_size_limit, plural=plural)
                    error_for(endpoint="auth.login", message=size_error)
                    return redirect(url_for("auth.login"))

                team.members.append(user)
                db.session.commit()

            if user.oauth_id is None:
                user.oauth_id = user_id
                user.verified = True
                db.session.commit()
                clear_user_session(user_id=user.id)

            login_user(user)

            return redirect(url_for("challenges.listing"))
        else:
            log("logins", "[{date}] {ip} - OAuth token retrieval failure")
            error_for(endpoint="auth.login",
                      message="OAuth token retrieval failure.")
            return redirect(url_for("auth.login"))
    else:
        log("logins", "[{date}] {ip} - Received redirect without OAuth code")
        error_for(endpoint="auth.login",
                  message="Received redirect without OAuth code.")
        return redirect(url_for("auth.login"))
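
The state check at the top of oauth_redirect only works if a per-session nonce was stored before the browser was sent to the provider, as oauth_login does earlier in this document. Below is a minimal sketch of how such a nonce could be issued, assuming a Flask session and the standard-library secrets module; the helper name is hypothetical.

import secrets

from flask import session


def ensure_login_nonce():
    # Hypothetical helper: bind a one-time value to the session so the OAuth
    # "state" parameter echoed back by the provider can be validated later.
    if not session.get("nonce"):
        session["nonce"] = secrets.token_hex(32)
    return session["nonce"]
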
Code example #28
0
File: uploaders.py Project: elfreki/CTFd
def __init__(self):
    super(BaseUploader, self).__init__()
    self.s3 = self._get_s3_connection()
    self.bucket = get_app_config("AWS_S3_BUCKET")
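
This __init__ references a _get_s3_connection helper that is not included in the snippet. A plausible sketch using boto3 is shown below; the config key names are assumptions rather than something taken from the snippet itself.

import boto3

from CTFd.utils import get_app_config


def _get_s3_connection(self):
    # Sketch only: build a boto3 S3 client from app config values.
    # The exact config keys are assumptions.
    access_key = get_app_config("AWS_ACCESS_KEY_ID")
    secret_key = get_app_config("AWS_SECRET_ACCESS_KEY")
    endpoint = get_app_config("AWS_S3_ENDPOINT_URL")
    return boto3.client(
        "s3",
        aws_access_key_id=access_key,
        aws_secret_access_key=secret_key,
        endpoint_url=endpoint,
    )
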
Code example #29
0
def import_ctf(backup, erase=True):
    if not zipfile.is_zipfile(backup):
        raise zipfile.BadZipfile

    backup = zipfile.ZipFile(backup)

    members = backup.namelist()
    max_content_length = get_app_config("MAX_CONTENT_LENGTH")
    for f in members:
        if f.startswith("/") or ".." in f:
            # Abort on malicious zip files
            raise zipfile.BadZipfile
        info = backup.getinfo(f)
        if max_content_length:
            if info.file_size > max_content_length:
                raise zipfile.LargeZipFile

    # Get list of directories in zipfile
    member_dirs = [os.path.split(m)[0] for m in members if "/" in m]
    if "db" not in member_dirs:
        raise Exception(
            'CTFd couldn\'t find the "db" folder in this backup. '
            "The backup may be malformed or corrupted and the import process cannot continue."
        )

    try:
        alembic_version = json.loads(
            backup.open("db/alembic_version.json").read())
        alembic_version = alembic_version["results"][0]["version_num"]
    except Exception:
        raise Exception(
            "Could not determine appropriate database version. This backup cannot be automatically imported."
        )

    # Check if the alembic version is from CTFd 1.x
    if alembic_version in (
            "1ec4a28fe0ff",
            "2539d8b5082e",
            "7e9efd084c5a",
            "87733981ca0e",
            "a4e30c94c360",
            "c12d2a1b0926",
            "c7225db614c1",
            "cb3cfcc47e2f",
            "cbf5620f8e15",
            "d5a224bf5862",
            "d6514ec92738",
            "dab615389702",
            "e62fd69bd417",
    ):
        raise Exception(
            "The version of CTFd that this backup is from is too old to be automatically imported."
        )

    sqlite = get_app_config("SQLALCHEMY_DATABASE_URI").startswith("sqlite")
    postgres = get_app_config("SQLALCHEMY_DATABASE_URI").startswith("postgres")
    mysql = get_app_config("SQLALCHEMY_DATABASE_URI").startswith("mysql")

    if erase:
        # Clear out existing connections to release any locks
        db.session.close()
        db.engine.dispose()

        # Kill sleeping processes on MySQL so we don't get a metadata lock
        # In my testing I didn't find that Postgres or SQLite needed the same treatment
        # Only run this when not in tests as we can't isolate the queries out
        # This is a very dirty hack. Don't try this at home kids.
        if mysql and get_app_config("TESTING", default=False) is False:
            url = make_url(get_app_config("SQLALCHEMY_DATABASE_URI"))
            r = db.session.execute("SHOW PROCESSLIST")
            processes = r.fetchall()
            for proc in processes:
                if (proc.Command == "Sleep" and proc.User == url.username
                        and proc.db == url.database):
                    proc_id = proc.Id
                    db.session.execute(f"KILL {proc_id}")

        # Drop database and recreate it to get to a clean state
        drop_database()
        create_database()
        # We explicitly do not want to upgrade or stamp here.
        # The import will have this information.

    side_db = dataset.connect(get_app_config("SQLALCHEMY_DATABASE_URI"))

    try:
        if postgres:
            side_db.query("SET session_replication_role=replica;")
        else:
            side_db.query("SET FOREIGN_KEY_CHECKS=0;")
    except Exception:
        print("Failed to disable foreign key checks. Continuing.")

    first = [
        "db/teams.json",
        "db/users.json",
        "db/challenges.json",
        "db/dynamic_challenge.json",
        "db/flags.json",
        "db/hints.json",
        "db/unlocks.json",
        "db/awards.json",
        "db/tags.json",
        "db/submissions.json",
        "db/solves.json",
        "db/files.json",
        "db/notifications.json",
        "db/pages.json",
        "db/tracking.json",
        "db/config.json",
    ]

    # We want to insert certain database tables first so we are specifying
    # the order with a list. The leftover tables are tables that are from a
    # plugin (more likely) or a table where we do not care about insertion order
    for item in first:
        if item in members:
            members.remove(item)

    # Upgrade the database to the point in time that the import was taken from
    migration_upgrade(revision=alembic_version)

    members.remove("db/alembic_version.json")

    # Combine the database insertion code into a function so that we can pause
    # insertion between official database tables and plugin tables
    def insertion(table_filenames):
        for member in table_filenames:
            if member.startswith("db/"):
                table_name = member[3:-5]

                try:
                    # Try to open a file but skip if it doesn't exist.
                    data = backup.open(member).read()
                except KeyError:
                    continue

                if data:
                    table = side_db[table_name]

                    saved = json.loads(data)
                    for entry in saved["results"]:
                        # This is a hack to get SQLite to properly accept datetime values from dataset
                        # See Issue #246
                        if sqlite:
                            direct_table = get_class_by_tablename(table.name)
                            for k, v in entry.items():
                                if isinstance(v, string_types):
                                    # We only want to apply this hack to columns that are expecting a datetime object
                                    try:
                                        is_dt_column = (
                                            type(getattr(direct_table, k).type)
                                            == sqltypes.DateTime
                                        )
                                    except AttributeError:
                                        is_dt_column = False

                                    # If the table is expecting a datetime, we should check if the string is one and convert it
                                    if is_dt_column:
                                        match = re.match(
                                            r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d", v
                                        )
                                        if match:
                                            entry[k] = datetime.datetime.strptime(
                                                v, "%Y-%m-%dT%H:%M:%S.%f"
                                            )
                                            continue
                                        match = re.match(
                                            r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}", v
                                        )
                                        if match:
                                            entry[k] = datetime.datetime.strptime(
                                                v, "%Y-%m-%dT%H:%M:%S"
                                            )
                                            continue
                        # From v2.0.0 to v2.1.0 requirements could have been a string or JSON because of a SQLAlchemy issue
                        # This is a hack to ensure we can still accept older exports. See #867
                        if member in (
                            "db/challenges.json",
                            "db/hints.json",
                            "db/awards.json",
                        ):
                            requirements = entry.get("requirements")
                            if requirements and isinstance(requirements, string_types):
                                entry["requirements"] = json.loads(requirements)

                        try:
                            table.insert(entry)
                        except ProgrammingError:
                            # MariaDB does not like JSON objects and prefers strings because it internally
                            # represents JSON with LONGTEXT.
                            # See Issue #973
                            requirements = entry.get("requirements")
                            if requirements and isinstance(requirements, dict):
                                entry["requirements"] = json.dumps(requirements)
                            table.insert(entry)

                        db.session.commit()
                    if postgres:
                        # This command is to set the next primary key ID for the re-inserted tables in Postgres. However,
                        # this command is very difficult to translate into SQLAlchemy code. Because Postgres is not
                        # officially supported, no major work will go into this functionality.
                        # https://stackoverflow.com/a/37972960
                        if '"' not in table_name and "'" not in table_name:
                            query = "SELECT setval(pg_get_serial_sequence('{table_name}', 'id'), coalesce(max(id)+1,1), false) FROM \"{table_name}\"".format(  # nosec
                                table_name=table_name)
                            side_db.engine.execute(query)
                        else:
                            raise Exception(
                                "Table name {table_name} contains quotes".format(
                                    table_name=table_name
                                )
                            )

    # Insert data from official tables
    insertion(first)

    # Create tables created by plugins
    # Run plugin migrations
    plugins = get_plugin_names()
    for plugin in plugins:
        revision = plugin_current(plugin_name=plugin)
        plugin_upgrade(plugin_name=plugin, revision=revision, lower=None)

    # Insert data for plugin tables
    insertion(members)

    # Bring plugin tables up to head revision
    plugins = get_plugin_names()
    for plugin in plugins:
        plugin_upgrade(plugin_name=plugin)

    # Extracting files
    files = [f for f in backup.namelist() if f.startswith("uploads/")]
    uploader = get_uploader()
    for f in files:
        filename = f.split(os.sep, 1)

        if (
            len(filename) < 2 or os.path.basename(filename[1]) == ""
        ):  # just an empty uploads directory (e.g. uploads/) or any directory
            continue

        # Get the second entry in the list (the actual filename)
        filename = filename[1]
        source = backup.open(f)
        uploader.store(fileobj=source, filename=filename)

    # Alembic sqlite support is lacking so we should just create_all anyway
    if sqlite:
        app.db.create_all()
        stamp_latest_revision()
    else:
        # Run migrations to bring to latest version
        migration_upgrade(revision="head")
        # Create any leftover tables, perhaps from old plugins
        app.db.create_all()

    try:
        if postgres:
            side_db.query("SET session_replication_role=DEFAULT;")
        else:
            side_db.query("SET FOREIGN_KEY_CHECKS=1;")
    except Exception:
        print("Failed to enable foreign key checks. Continuing.")

    # Invalidate all cached data
    cache.clear()

    # Set default theme in case the current instance or the import does not provide it
    set_config("ctf_theme", "core")
    set_config("ctf_version", CTFD_VERSION)
Code example #30
0
File: __init__.py Project: csnp/njit-ctf
def ctf_theme_candidates():
    yield ctf_theme()
    if bool(get_app_config("THEME_FALLBACK")):
        yield DEFAULT_THEME
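
A hedged sketch of how a caller might consume this generator, for example to resolve the first candidate theme that actually provides a requested template. The template_exists callback is hypothetical and not part of the snippet above.

def resolve_theme_template(template_name, template_exists):
    # Walk the candidate themes in priority order and return the first
    # "<theme>/templates/<template_name>" path the callback confirms exists.
    for theme in ctf_theme_candidates():
        candidate = "{}/templates/{}".format(theme, template_name)
        if template_exists(candidate):
            return candidate
    raise FileNotFoundError(template_name)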