def sendmail(addr, text, subject):
    ctf_name = get_config("ctf_name")
    mailfrom_addr = get_config("mailfrom_addr") or get_app_config("MAILFROM_ADDR")
    mailfrom_addr = "{} <{}>".format(ctf_name, mailfrom_addr)

    mailgun_base_url = get_config("mailgun_base_url") or get_app_config("MAILGUN_BASE_URL")
    mailgun_api_key = get_config("mailgun_api_key") or get_app_config("MAILGUN_API_KEY")
    try:
        r = requests.post(
            mailgun_base_url + "/messages",
            auth=("api", mailgun_api_key),
            data={
                "from": mailfrom_addr,
                "to": [addr],
                "subject": subject,
                "text": text,
            },
            timeout=1.0,
        )
    except requests.RequestException as e:
        return (
            False,
            "{error} exception occurred while handling your request".format(
                error=type(e).__name__
            ),
        )

    if r.status_code == 200:
        return True, "Email sent"
    else:
        return False, "Mailgun settings are incorrect"

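# Usage sketch (not part of the original module): how a caller might use the Mailgun
# variant of sendmail() above. The recipient address, body, and subject are illustrative
# placeholders; sendmail() returns a (success, message) tuple.
def _example_send_verification_email(addr):
    success, message = sendmail(
        addr=addr,
        text="Please click the link below to confirm your email address.",
        subject="Confirm your account",
    )
    if not success:
        # message explains the failure (request exception or bad Mailgun settings)
        raise RuntimeError(message)
    return message
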
def oauth_login():
    endpoint = (
        get_app_config("OAUTH_AUTHORIZATION_ENDPOINT")
        or get_config("oauth_authorization_endpoint")
        or "https://auth.majorleaguecyber.org/oauth/authorize"
    )

    if get_config("user_mode") == "teams":
        scope = "profile team"
    else:
        scope = "profile"

    client_id = get_app_config("OAUTH_CLIENT_ID") or get_config("oauth_client_id")

    if client_id is None:
        error_for(
            endpoint="auth.login",
            message="OAuth Settings not configured. "
            "Ask your CTF administrator to configure MajorLeagueCyber integration.",
        )
        return redirect(url_for("auth.login"))

    redirect_url = "{endpoint}?response_type=code&client_id={client_id}&scope={scope}&state={state}".format(
        endpoint=endpoint,
        client_id=client_id,
        scope=scope,
        state=session["nonce"],
    )
    return redirect(redirect_url)

def _get_s3_connection(self):
    access_key = get_app_config("AWS_ACCESS_KEY_ID")
    secret_key = get_app_config("AWS_SECRET_ACCESS_KEY")
    endpoint = get_app_config("AWS_S3_ENDPOINT_URL")
    client = boto3.client(
        "s3",
        aws_access_key_id=access_key,
        aws_secret_access_key=secret_key,
        endpoint_url=endpoint,
    )
    return client

def update_check(force=False):
    """
    Makes a request to kmactf to check if there is a new version of KMActf available.

    The service is provided in return for users opting in to anonymous usage data
    collection. Users can opt out of update checks by specifying UPDATE_CHECK = False
    in config.py.

    :param force:
    :return:
    """
    # If UPDATE_CHECK is disabled don't check for updates at all.
    if app.config.get("UPDATE_CHECK") is False:
        return

    # Don't do an update check if not setup
    if is_setup() is False:
        return

    # Get when we should check for updates next.
    next_update_check = get_config("next_update_check") or 0

    # If we have passed our saved time or we are forcing we should check.
    update = (next_update_check < time.time()) or force

    if update:
        try:
            name = str(get_config("ctf_name")) or ""
            params = {
                "ctf_id": sha256(name),
                "current": app.VERSION,
                "python_version_raw": sys.hexversion,
                "python_version": python_version(),
                "db_driver": db.session.bind.dialect.name,
                "challenge_count": Challenges.query.count(),
                "user_mode": get_config("user_mode"),
                "user_count": Users.query.count(),
                "team_count": Teams.query.count(),
                "theme": get_config("ctf_theme"),
                "upload_provider": get_app_config("UPLOAD_PROVIDER"),
            }
            check = requests.post(
                "https://actvn.edu.vn/", json=params, timeout=0.1
            ).json()
        except requests.exceptions.RequestException:
            pass
        except ValueError:
            pass
        else:
            try:
                latest = check["resource"]["tag"]
                html_url = check["resource"]["html_url"]
                if StrictVersion(latest) > StrictVersion(app.VERSION):
                    set_config("version_latest", html_url)
                elif StrictVersion(latest) <= StrictVersion(app.VERSION):
                    set_config("version_latest", None)
                next_update_check_time = check["resource"].get(
                    "next", int(time.time() + 43200)
                )
                set_config("next_update_check", next_update_check_time)
            except KeyError:
                set_config("version_latest", None)

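# Illustration (a minimal sketch, not part of the original code): update_check() above
# decides whether to store "version_latest" by comparing the reported tag against
# app.VERSION with distutils' StrictVersion. The version strings here are hypothetical.
def _example_is_newer(latest_tag, running_version):
    from distutils.version import StrictVersion  # same comparison helper used above

    # e.g. _example_is_newer("2.1.3", "2.1.0") -> True, so a newer release is available
    return StrictVersion(latest_tag) > StrictVersion(running_version)
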
def is_human(captcha_response):
    """
    Validates a reCAPTCHA response against Google's verification server.

    Returns True if the captcha test passed for the submitted form, otherwise False.
    """
    secret = get_app_config("RC_SECRET_KEY")
    payload = {"response": captcha_response, "secret": secret}
    response = requests.post(
        "https://www.google.com/recaptcha/api/siteverify", data=payload
    )
    response_text = json.loads(response.text)
    return response_text["success"]

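# Usage sketch (hypothetical view logic, not part of the original code): a form handler
# could gate processing on is_human(). "g-recaptcha-response" is the field name the
# reCAPTCHA widget submits; the responses returned here are placeholders.
def _example_register_with_captcha():
    captcha_response = request.form.get("g-recaptcha-response", "")
    if not is_human(captcha_response):
        # Reject the submission before doing any further work
        return ("Captcha verification failed", 403)
    return ("ok", 200)
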
def subscribe():
    @stream_with_context
    def gen():
        for event in current_app.events_manager.subscribe():
            yield str(event)

    enabled = get_app_config("SERVER_SENT_EVENTS")

    if enabled is False:
        return ("", 204)

    return Response(gen(), mimetype="text/event-stream")

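# Client sketch (an assumption, not part of the module): subscribe() above streams
# "text/event-stream" data, so a Python consumer could read it incrementally with
# requests. The "/events" route below is a placeholder for wherever the view is mounted.
def _example_consume_events(base_url):
    resp = requests.get(base_url + "/events", stream=True)  # hypothetical route
    for line in resp.iter_lines(decode_unicode=True):
        if line:  # skip the blank keep-alive lines between events
            print(line)
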
def export_ctf():
    # TODO: For some unknown reason dataset is only able to see alembic_version during tests.
    # Even using a real sqlite database. This makes this test impossible to pass in sqlite.
    db = dataset.connect(get_app_config("SQLALCHEMY_DATABASE_URI"))

    # Backup database
    backup = tempfile.NamedTemporaryFile()

    backup_zip = zipfile.ZipFile(backup, "w")

    tables = db.tables
    for table in tables:
        result = db[table].all()
        result_file = six.BytesIO()
        datafreeze.freeze(result, format="kmactf", fileobj=result_file)
        result_file.seek(0)
        backup_zip.writestr("db/{}.json".format(table), result_file.read())

    # Guarantee that alembic_version is saved into the export
    if "alembic_version" not in tables:
        result = {
            "count": 1,
            "results": [{"version_num": get_current_revision()}],
            "meta": {},
        }
        result_file = six.BytesIO()
        json.dump(result, result_file)
        result_file.seek(0)
        backup_zip.writestr("db/alembic_version.json", result_file.read())

    # Backup uploads
    uploader = get_uploader()
    uploader.sync()

    upload_folder = os.path.join(
        os.path.normpath(app.root_path), app.config.get("UPLOAD_FOLDER")
    )
    for root, dirs, files in os.walk(upload_folder):
        for file in files:
            parent_dir = os.path.basename(root)
            backup_zip.write(
                os.path.join(root, file),
                arcname=os.path.join("uploads", parent_dir, file),
            )

    backup_zip.close()
    backup.seek(0)
    return backup

def sendmail(addr, text, subject):
    ctf_name = get_config("ctf_name")
    mailfrom_addr = get_config("mailfrom_addr") or get_app_config("MAILFROM_ADDR")
    mailfrom_addr = "{} <{}>".format(ctf_name, mailfrom_addr)

    data = {
        "host": get_config("mail_server") or get_app_config("MAIL_SERVER"),
        "port": int(get_config("mail_port") or get_app_config("MAIL_PORT")),
    }
    username = get_config("mail_username") or get_app_config("MAIL_USERNAME")
    password = get_config("mail_password") or get_app_config("MAIL_PASSWORD")
    TLS = get_config("mail_tls") or get_app_config("MAIL_TLS")
    SSL = get_config("mail_ssl") or get_app_config("MAIL_SSL")
    auth = get_config("mail_useauth") or get_app_config("MAIL_USEAUTH")

    if username:
        data["username"] = username
    if password:
        data["password"] = password
    if TLS:
        data["TLS"] = TLS
    if SSL:
        data["SSL"] = SSL
    if auth:
        data["auth"] = auth

    try:
        smtp = get_smtp(**data)

        msg = MIMEText(text)
        msg["Subject"] = subject
        msg["From"] = mailfrom_addr
        msg["To"] = addr

        smtp.sendmail(msg["From"], [msg["To"]], msg.as_string())
        smtp.quit()
        return True, "Email sent"
    except smtplib.SMTPException as e:
        return False, str(e)
    except timeout:
        return False, "SMTP server connection timed out"
    except Exception as e:
        return False, str(e)

def import_ctf(backup, erase=True):
    if not zipfile.is_zipfile(backup):
        raise zipfile.BadZipfile

    backup = zipfile.ZipFile(backup)

    members = backup.namelist()
    max_content_length = get_app_config("MAX_CONTENT_LENGTH")
    for f in members:
        if f.startswith("/") or ".." in f:
            # Abort on malicious zip files
            raise zipfile.BadZipfile
        info = backup.getinfo(f)
        if max_content_length:
            if info.file_size > max_content_length:
                raise zipfile.LargeZipFile

    try:
        alembic_version = json.loads(backup.open("db/alembic_version.json").read())
        alembic_version = alembic_version["results"][0]["version_num"]
    except Exception:
        raise Exception(
            "Could not determine appropriate database version. This backup cannot be automatically imported."
        )

    # Check if the alembic version is from KMActf 1.x
    if alembic_version in (
        "1ec4a28fe0ff",
        "2539d8b5082e",
        "7e9efd084c5a",
        "87733981ca0e",
        "a4e30c94c360",
        "c12d2a1b0926",
        "c7225db614c1",
        "cb3cfcc47e2f",
        "cbf5620f8e15",
        "d5a224bf5862",
        "d6514ec92738",
        "dab615389702",
        "e62fd69bd417",
    ):
        raise Exception(
            "The version of KMActf that this backup is from is too old to be automatically imported."
        )

    if erase:
        drop_database()
        create_database()
        # We explicitly do not want to upgrade or stamp here.
        # The import will have this information.

    side_db = dataset.connect(get_app_config("SQLALCHEMY_DATABASE_URI"))
    sqlite = get_app_config("SQLALCHEMY_DATABASE_URI").startswith("sqlite")
    postgres = get_app_config("SQLALCHEMY_DATABASE_URI").startswith("postgres")

    try:
        if postgres:
            side_db.query("SET session_replication_role=replica;")
        else:
            side_db.query("SET FOREIGN_KEY_CHECKS=0;")
    except Exception:
        print("Failed to disable foreign key checks. Continuing.")

    first = [
        "db/teams.json",
        "db/users.json",
        "db/challenges.json",
        "db/dynamic_challenge.json",
        "db/flags.json",
        "db/hints.json",
        "db/unlocks.json",
        "db/awards.json",
        "db/tags.json",
        "db/submissions.json",
        "db/solves.json",
        "db/files.json",
        "db/notifications.json",
        "db/pages.json",
        "db/tracking.json",
        "db/config.json",
    ]

    for item in first:
        if item in members:
            members.remove(item)

    members = first + members

    upgrade(revision=alembic_version)

    # Create tables created by plugins
    try:
        app.db.create_all()
    except OperationalError as e:
        if not postgres:
            raise e
        else:
            print("Allowing error during app.db.create_all() due to Postgres")

    members.remove("db/alembic_version.json")

    for member in members:
        if member.startswith("db/"):
            table_name = member[3:-5]

            try:
                # Try to open a file but skip if it doesn't exist.
                data = backup.open(member).read()
            except KeyError:
                continue

            if data:
                table = side_db[table_name]

                saved = json.loads(data)
                for entry in saved["results"]:
                    # This is a hack to get SQLite to properly accept datetime values from dataset
                    # See Issue #246
                    if sqlite:
                        direct_table = get_class_by_tablename(table.name)
                        for k, v in entry.items():
                            if isinstance(v, six.string_types):
                                # We only want to apply this hack to columns that are expecting a datetime object
                                try:
                                    is_dt_column = (
                                        type(getattr(direct_table, k).type)
                                        == sqltypes.DateTime
                                    )
                                except AttributeError:
                                    is_dt_column = False

                                # If the table is expecting a datetime, we should check if the string is one and convert it
                                if is_dt_column:
                                    match = re.match(
                                        r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d", v
                                    )
                                    if match:
                                        entry[k] = datetime.datetime.strptime(
                                            v, "%Y-%m-%dT%H:%M:%S.%f"
                                        )
                                        continue
                                    match = re.match(
                                        r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}", v
                                    )
                                    if match:
                                        entry[k] = datetime.datetime.strptime(
                                            v, "%Y-%m-%dT%H:%M:%S"
                                        )
                                        continue

                    # From v2.0.0 to v2.1.0 requirements could have been a string or JSON because of a SQLAlchemy issue
                    # This is a hack to ensure we can still accept older exports. See #867
                    if member in (
                        "db/challenges.json",
                        "db/hints.json",
                        "db/awards.json",
                    ):
                        requirements = entry.get("requirements")
                        if requirements and isinstance(requirements, six.string_types):
                            entry["requirements"] = json.loads(requirements)

                    try:
                        table.insert(entry)
                    except ProgrammingError:
                        # MariaDB does not like JSON objects and prefers strings because it internally
                        # represents JSON with LONGTEXT.
                        # See Issue #973
                        requirements = entry.get("requirements")
                        if requirements and isinstance(requirements, dict):
                            entry["requirements"] = json.dumps(requirements)
                        table.insert(entry)

                    db.session.commit()

                if postgres:
                    # This command is to set the next primary key ID for the re-inserted tables in Postgres. However,
                    # this command is very difficult to translate into SQLAlchemy code. Because Postgres is not
                    # officially supported, no major work will go into this functionality.
                    # https://stackoverflow.com/a/37972960
                    if '"' not in table_name and "'" not in table_name:
                        query = "SELECT setval(pg_get_serial_sequence('{table_name}', 'id'), coalesce(max(id)+1,1), false) FROM \"{table_name}\"".format(  # nosec
                            table_name=table_name
                        )
                        side_db.engine.execute(query)
                    else:
                        raise Exception(
                            "Table name {table_name} contains quotes".format(
                                table_name=table_name
                            )
                        )

    # Extracting files
    files = [f for f in backup.namelist() if f.startswith("uploads/")]
    uploader = get_uploader()
    for f in files:
        filename = f.split(os.sep, 1)

        if (
            len(filename) < 2 or os.path.basename(filename[1]) == ""
        ):  # just an empty uploads directory (e.g. uploads/) or any directory
            continue

        filename = filename[1]  # Get the second entry in the list (the actual filename)
        source = backup.open(f)
        uploader.store(fileobj=source, filename=filename)

    # Alembic sqlite support is lacking so we should just create_all anyway
    try:
        upgrade(revision="head")
    except (OperationalError, CommandError, RuntimeError, SystemExit, Exception):
        app.db.create_all()
        stamp_latest_revision()

    try:
        if postgres:
            side_db.query("SET session_replication_role=DEFAULT;")
        else:
            side_db.query("SET FOREIGN_KEY_CHECKS=1;")
    except Exception:
        print("Failed to enable foreign key checks. Continuing.")

    # Invalidate all cached data
    cache.clear()

    # Set default theme in case the current instance or the import does not provide it
    set_config("ctf_theme", "core")

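# Round-trip sketch (hypothetical usage, not part of the original code): export_ctf()
# above returns an open temporary file seeked to the start, which can be written to disk
# and later handed back to import_ctf(). The archive path is an example only.
def _example_backup_and_restore(path="backup.zip"):
    backup = export_ctf()
    with open(path, "wb") as f:
        f.write(backup.read())
    # Later, e.g. on a fresh instance, restore from the same archive; erase=True drops
    # and recreates the database first, as import_ctf() does by default.
    import_ctf(path, erase=True)
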
def __init__(self):
    super(BaseUploader, self).__init__()
    self.s3 = self._get_s3_connection()
    self.bucket = get_app_config("AWS_S3_BUCKET")

def get_uploader():
    return UPLOADERS.get(get_app_config("UPLOAD_PROVIDER") or "filesystem")()

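# Usage sketch (illustrative, not part of the original code): get_uploader() resolves the
# configured provider ("filesystem" when UPLOAD_PROVIDER is unset) and instantiates it.
# store() mirrors how import_ctf() above writes extracted files; the local file name is
# a placeholder.
def _example_store_upload():
    uploader = get_uploader()
    with open("challenge.zip", "rb") as fileobj:  # hypothetical local file
        uploader.store(fileobj=fileobj, filename="challenge.zip")
    # export_ctf() above calls sync() before archiving the upload folder
    uploader.sync()
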
def oauth_redirect():
    oauth_code = request.args.get("code")
    state = request.args.get("state")
    if session["nonce"] != state:
        log("logins", "[{date}] {ip} - OAuth State validation mismatch")
        error_for(endpoint="auth.login", message="OAuth State validation mismatch.")
        return redirect(url_for("auth.login"))

    if oauth_code:
        url = (
            get_app_config("OAUTH_TOKEN_ENDPOINT")
            or get_config("oauth_token_endpoint")
            or "https://auth.majorleaguecyber.org/oauth/token"
        )

        client_id = get_app_config("OAUTH_CLIENT_ID") or get_config("oauth_client_id")
        client_secret = get_app_config("OAUTH_CLIENT_SECRET") or get_config(
            "oauth_client_secret"
        )
        headers = {"content-type": "application/x-www-form-urlencoded"}
        data = {
            "code": oauth_code,
            "client_id": client_id,
            "client_secret": client_secret,
            "grant_type": "authorization_code",
        }
        token_request = requests.post(url, data=data, headers=headers)

        if token_request.status_code == requests.codes.ok:
            token = token_request.json()["access_token"]
            user_url = (
                get_app_config("OAUTH_API_ENDPOINT")
                or get_config("oauth_api_endpoint")
                or "https://api.majorleaguecyber.org/user"
            )

            headers = {
                "Authorization": "Bearer " + str(token),
                "Content-type": "application/json",
            }
            api_data = requests.get(url=user_url, headers=headers).json()

            user_id = api_data["id"]
            user_name = api_data["name"]
            user_email = api_data["email"]

            user = Users.query.filter_by(email=user_email).first()
            if user is None:
                # Check if we are allowing registration before creating users
                if registration_visible() or mlc_registration():
                    user = Users(
                        name=user_name,
                        email=user_email,
                        oauth_id=user_id,
                        verified=True,
                    )
                    db.session.add(user)
                    db.session.commit()
                else:
                    log("logins", "[{date}] {ip} - Public registration via MLC blocked")
                    error_for(
                        endpoint="auth.login",
                        message="Public registration is disabled. Please try again later.",
                    )
                    return redirect(url_for("auth.login"))

            if get_config("user_mode") == TEAMS_MODE:
                team_id = api_data["team"]["id"]
                team_name = api_data["team"]["name"]

                team = Teams.query.filter_by(oauth_id=team_id).first()
                if team is None:
                    team = Teams(name=team_name, oauth_id=team_id, captain_id=user.id)
                    db.session.add(team)
                    db.session.commit()

                team_size_limit = get_config("team_size", default=0)
                if team_size_limit and len(team.members) >= team_size_limit:
                    plural = "" if team_size_limit == 1 else "s"
                    size_error = "Teams are limited to {limit} member{plural}.".format(
                        limit=team_size_limit, plural=plural
                    )
                    error_for(endpoint="auth.login", message=size_error)
                    return redirect(url_for("auth.login"))

                team.members.append(user)
                db.session.commit()

            if user.oauth_id is None:
                user.oauth_id = user_id
                user.verified = True
                db.session.commit()

            login_user(user)

            return redirect(url_for("challenges.listing"))
        else:
            log("logins", "[{date}] {ip} - OAuth token retrieval failure")
            error_for(endpoint="auth.login", message="OAuth token retrieval failure.")
            return redirect(url_for("auth.login"))
    else:
        log("logins", "[{date}] {ip} - Received redirect without OAuth code")
        error_for(endpoint="auth.login", message="Received redirect without OAuth code.")
        return redirect(url_for("auth.login"))