Example #1
def create_app(config="CTFd.config.Config"):
    app = CTFdFlask(__name__)
    with app.app_context():
        app.config.from_object(config)

        loaders = []
        # We provide a `DictLoader` which may be used to override templates
        app.overridden_templates = {}
        loaders.append(jinja2.DictLoader(app.overridden_templates))
        # A `ThemeLoader` with no `theme_name` will load from the current theme
        loaders.append(ThemeLoader())
        # If `THEME_FALLBACK` is set and true, we add another loader which will
        # load from the `DEFAULT_THEME` - this mirrors the order implemented by
        # `config.ctf_theme_candidates()`
        if bool(app.config.get("THEME_FALLBACK")):
            loaders.append(ThemeLoader(theme_name=DEFAULT_THEME))
        # All themes including admin can be accessed by prefixing their name
        prefix_loader_dict = {ADMIN_THEME: ThemeLoader(theme_name=ADMIN_THEME)}
        for theme_name in CTFd.utils.config.get_themes():
            prefix_loader_dict[theme_name] = ThemeLoader(theme_name=theme_name)
        loaders.append(jinja2.PrefixLoader(prefix_loader_dict))
        # Plugin templates are also accessed via prefix but we just point a
        # normal `FileSystemLoader` at the plugin tree rather than validating
        # each plugin here (that happens later in `init_plugins()`). We
        # deliberately don't add this to `prefix_loader_dict` defined above
        # because to do so would break template loading from a theme called
        # `prefix` (even though that'd be weird).
        plugin_loader = jinja2.FileSystemLoader(
            searchpath=os.path.join(app.root_path, "plugins"), followlinks=True
        )
        loaders.append(jinja2.PrefixLoader({"plugins": plugin_loader}))
        # Use a choice loader to find the first match from our list of loaders
        app.jinja_loader = jinja2.ChoiceLoader(loaders)
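        # Resolution order is therefore: overridden templates, the current
        # theme, the optional DEFAULT_THEME fallback, "<theme>/..." prefixed
        # paths (including the admin theme), and finally "plugins/..." paths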

        from CTFd.models import (  # noqa: F401
            db, Teams, Solves, Challenges, Fails, Flags, Tags, Files, Tracking,
        )

        url = create_database()

        # This allows any changes to the SQLALCHEMY_DATABASE_URI to get pushed back in
        # This is mostly so we can force MySQL's charset
        app.config["SQLALCHEMY_DATABASE_URI"] = str(url)

        # Register database
        db.init_app(app)

        # Register Flask-Migrate
        migrations.init_app(app, db)

        # Alembic sqlite support is lacking so we should just create_all anyway
        if url.drivername.startswith("sqlite"):
            # Enable foreign keys for SQLite. This must be before the
            # db.create_all call because tests use the in-memory SQLite
            # database (each connection, including db creation, is a new db).
            # https://docs.sqlalchemy.org/en/13/dialects/sqlite.html#foreign-key-support
            from sqlalchemy.engine import Engine
            from sqlalchemy import event

            @event.listens_for(Engine, "connect")
            def set_sqlite_pragma(dbapi_connection, connection_record):
                cursor = dbapi_connection.cursor()
                cursor.execute("PRAGMA foreign_keys=ON")
                cursor.close()

            db.create_all()
            stamp_latest_revision()
        else:
            # This creates tables instead of db.create_all()
            # Allows migrations to happen properly
            upgrade()

        from CTFd.models import ma

        ma.init_app(app)

        app.db = db
        app.VERSION = __version__
        app.CHANNEL = __channel__
        # TODO: This gets overwritten, need to place in a new area (look into the plugin guide)
        # TODO: This needs to be cleaned up but does what we need for now
        # TODO: Try MariaDB - if it fails, use SQLite
        # NOTE: `SHOW TABLES` and `AUTO_INCREMENT` below are MySQL/MariaDB
        # syntax, so this block will not run against SQLite or Postgres as written
        qry = 'SHOW TABLES;'

        table_name_data = app.db.session.execute(qry).fetchall()
        table_collection = get_current_tables(table_name_data)

        if 'association' in table_collection:
            print(' - Association Table Found!')
        else:
            print(' - Association Table does not exist, generating...')
            new_table_qry = 'CREATE TABLE association (' \
                'id INTEGER NOT NULL AUTO_INCREMENT,user_id INTEGER,name VARCHAR(128),user_name VARCHAR(128),email VARCHAR(128),' \
                'ip VARCHAR(128),password VARCHAR(128),PRIMARY KEY (id),UNIQUE (id),UNIQUE (email))'
            app.db.session.execute(new_table_qry)

            table_name_data = app.db.session.execute(qry).fetchall()
            print('Found: {}'.format(table_name_data))
            table_collection = get_current_tables(table_name_data)

            if 'association' in table_collection:
                print(' - Association Table Found!\n')
            else:
                # Abort instead of continuing with a half-initialized schema
                raise RuntimeError(
                    'Association Table failed to be created! We cannot continue'
                )

        from CTFd.cache import cache

        cache.init_app(app)
        app.cache = cache

        reverse_proxy = app.config.get("REVERSE_PROXY")
        if reverse_proxy:
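            # e.g. REVERSE_PROXY = "1,1,1,1,1" maps positionally onto
            # ProxyFix's x_for, x_proto, x_host, x_port and x_prefix
            # arguments (Werkzeug 1.0+ signature)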
            if isinstance(reverse_proxy, str) and "," in reverse_proxy:
                proxyfix_args = [int(i) for i in reverse_proxy.split(",")]
                app.wsgi_app = ProxyFix(app.wsgi_app, *proxyfix_args)
            else:
                app.wsgi_app = ProxyFix(app.wsgi_app,
                                        x_for=1,
                                        x_proto=1,
                                        x_host=1,
                                        x_port=1,
                                        x_prefix=1)

        version = utils.get_config("ctf_version")

        # Upgrading from an older version of CTFd
        if version and (StrictVersion(version) < StrictVersion(__version__)):
            if confirm_upgrade():
                run_upgrade()
            else:
                exit()

        if not version:
            utils.set_config("ctf_version", __version__)

        if not utils.get_config("ctf_theme"):
            utils.set_config("ctf_theme", DEFAULT_THEME)

        update_check(force=True)

        init_request_processors(app)
        init_template_filters(app)
        init_template_globals(app)

        # Importing here allows tests to use sensible names (e.g. api instead of api_bp)
        from CTFd.views import views
        from CTFd.teams import teams
        from CTFd.users import users
        from CTFd.challenges import challenges
        from CTFd.scoreboard import scoreboard
        from CTFd.auth import auth
        from CTFd.admin import admin
        from CTFd.api import api
        from CTFd.events import events
        from CTFd.errors import render_error

        app.register_blueprint(views)
        app.register_blueprint(teams)
        app.register_blueprint(users)
        app.register_blueprint(challenges)
        app.register_blueprint(scoreboard)
        app.register_blueprint(auth)
        app.register_blueprint(api)
        app.register_blueprint(events)

        app.register_blueprint(admin)

        for code in {403, 404, 500, 502}:
            app.register_error_handler(code, render_error)

        init_logs(app)
        init_events(app)
        init_plugins(app)

        return app
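
A minimal usage sketch (not part of the snippet above, and assuming the package exposes create_app at the top level as CTFd does): the factory is called once and the returned app is either run directly for development or handed to a WSGI server.

from CTFd import create_app

app = create_app(config="CTFd.config.Config")

if __name__ == "__main__":
    # For local development only; production deployments normally run the
    # app under a WSGI server such as gunicorn. Port 4000 mirrors CTFd's
    # development default and is otherwise illustrative.
    app.run(host="0.0.0.0", port=4000)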
Example #2
def import_ctf(backup, erase=True):
    if not zipfile.is_zipfile(backup):
        raise zipfile.BadZipfile

    backup = zipfile.ZipFile(backup)

    members = backup.namelist()
    max_content_length = get_app_config("MAX_CONTENT_LENGTH")
    for f in members:
        if f.startswith("/") or ".." in f:
            # Abort on malicious zip files
            raise zipfile.BadZipfile
        info = backup.getinfo(f)
        if max_content_length:
            if info.file_size > max_content_length:
                raise zipfile.LargeZipFile

    try:
        alembic_version = json.loads(
            backup.open("db/alembic_version.json").read())
        alembic_version = alembic_version["results"][0]["version_num"]
    except Exception:
        raise Exception(
            "Could not determine appropriate database version. This backup cannot be automatically imported."
        )

    # Check if the alembic version is from CTFd 1.x
    if alembic_version in (
            "1ec4a28fe0ff",
            "2539d8b5082e",
            "7e9efd084c5a",
            "87733981ca0e",
            "a4e30c94c360",
            "c12d2a1b0926",
            "c7225db614c1",
            "cb3cfcc47e2f",
            "cbf5620f8e15",
            "d5a224bf5862",
            "d6514ec92738",
            "dab615389702",
            "e62fd69bd417",
    ):
        raise Exception(
            "The version of CTFd that this backup is from is too old to be automatically imported."
        )

    if erase:
        drop_database()
        create_database()
        # We explicitly do not want to upgrade or stamp here.
        # The import will have this information.

    side_db = dataset.connect(get_app_config("SQLALCHEMY_DATABASE_URI"))
    sqlite = get_app_config("SQLALCHEMY_DATABASE_URI").startswith("sqlite")
    postgres = get_app_config("SQLALCHEMY_DATABASE_URI").startswith("postgres")
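    # `dataset` opens the same database through a lightweight, ORM-free
    # interface, so rows can be inserted exactly as they appear in the backup JSON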

    try:
        if postgres:
            side_db.query("SET session_replication_role=replica;")
        else:
            side_db.query("SET FOREIGN_KEY_CHECKS=0;")
    except Exception:
        print("Failed to disable foreign key checks. Continuing.")

    first = [
        "db/teams.json",
        "db/users.json",
        "db/challenges.json",
        "db/dynamic_challenge.json",
        "db/flags.json",
        "db/hints.json",
        "db/unlocks.json",
        "db/awards.json",
        "db/tags.json",
        "db/submissions.json",
        "db/solves.json",
        "db/files.json",
        "db/notifications.json",
        "db/pages.json",
        "db/tracking.json",
        "db/config.json",
    ]

    for item in first:
        if item in members:
            members.remove(item)

    members = first + members
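    # The known core tables above are inserted first so that rows referenced
    # by foreign keys exist before the rows that point at them; the leftover
    # members are plugin tables or tables where insertion order does not matter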

    upgrade(revision=alembic_version)

    # Create tables created by plugins
    try:
        app.db.create_all()
    except OperationalError as e:
        if not postgres:
            raise e
        else:
            print("Allowing error during app.db.create_all() due to Postgres")

    members.remove("db/alembic_version.json")

    for member in members:
        if member.startswith("db/"):
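            # Strip the "db/" prefix and the ".json" suffix to get the table name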
            table_name = member[3:-5]

            try:
                # Try to open a file but skip if it doesn't exist.
                data = backup.open(member).read()
            except KeyError:
                continue

            if data:
                table = side_db[table_name]

                saved = json.loads(data)
                for entry in saved["results"]:
                    # This is a hack to get SQLite to properly accept datetime values from dataset
                    # See Issue #246
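                    # e.g. "2019-05-01T01:02:03.000000" becomes
                    # datetime.datetime(2019, 5, 1, 1, 2, 3)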
                    if sqlite:
                        for k, v in entry.items():
                            if isinstance(v, six.string_types):
                                match = re.match(
                                    r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d",
                                    v)
                                if match:
                                    entry[k] = datetime.datetime.strptime(
                                        v, "%Y-%m-%dT%H:%M:%S.%f")
                                    continue
                                match = re.match(
                                    r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}", v)
                                if match:
                                    entry[k] = datetime.datetime.strptime(
                                        v, "%Y-%m-%dT%H:%M:%S")
                                    continue
                    # From v2.0.0 to v2.1.0 requirements could have been a string or JSON because of a SQLAlchemy issue
                    # This is a hack to ensure we can still accept older exports. See #867
                    if member in (
                            "db/challenges.json",
                            "db/hints.json",
                            "db/awards.json",
                    ):
                        requirements = entry.get("requirements")
                        if requirements and isinstance(requirements,
                                                       six.string_types):
                            entry["requirements"] = json.loads(requirements)

                    try:
                        table.insert(entry)
                    except ProgrammingError:
                        # MariaDB does not like JSON objects and prefers strings because it internally
                        # represents JSON with LONGTEXT.
                        # See Issue #973
                        requirements = entry.get("requirements")
                        if requirements and isinstance(requirements, dict):
                            entry["requirements"] = json.dumps(requirements)
                        table.insert(entry)

                    db.session.commit()
                if postgres:
                    # This command is to set the next primary key ID for the re-inserted tables in Postgres. However,
                    # this command is very difficult to translate into SQLAlchemy code. Because Postgres is not
                    # officially supported, no major work will go into this functionality.
                    # https://stackoverflow.com/a/37972960
                    if '"' not in table_name and "'" not in table_name:
                        query = "SELECT setval(pg_get_serial_sequence('{table_name}', 'id'), coalesce(max(id)+1,1), false) FROM \"{table_name}\"".format(  # nosec
                            table_name=table_name)
                        side_db.engine.execute(query)
                    else:
                        raise Exception(
                            "Table name {table_name} contains quotes".format(
                                table_name=table_name))

    # Extracting files
    files = [f for f in backup.namelist() if f.startswith("uploads/")]
    uploader = get_uploader()
    for f in files:
        # Zip member names always use forward slashes, regardless of platform
        filename = f.split("/", 1)

        if len(filename) < 2:  # just an empty uploads directory (e.g. uploads/)
            continue

        # Get the second entry in the list (the actual filename)
        filename = filename[1]
        source = backup.open(f)
        uploader.store(fileobj=source, filename=filename)

    # Alembic sqlite support is lacking so we should just create_all anyway
    try:
        upgrade(revision="head")
    except (SystemExit, Exception):
        # `Exception` already covers OperationalError, CommandError and
        # RuntimeError; SystemExit is listed separately because it does not
        # inherit from Exception
        app.db.create_all()
        stamp_latest_revision()

    try:
        if postgres:
            side_db.query("SET session_replication_role=DEFAULT;")
        else:
            side_db.query("SET FOREIGN_KEY_CHECKS=1;")
    except Exception:
        print("Failed to enable foreign key checks. Continuing.")

    # Invalidate all cached data
    cache.clear()

    # Set default theme in case the current instance or the import does not provide it
    set_config("ctf_theme", "core")
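
A minimal usage sketch (not part of the snippet above; the module path is an assumption based on CTFd's layout): import_ctf() touches app.db and the cache, so it is driven inside an application context.

from CTFd import create_app
from CTFd.utils.exports import import_ctf  # assumed location of this function

app = create_app()
with app.app_context():
    # Restores the database and uploaded files from a backup zip;
    # erase=True drops and recreates the database first
    import_ctf("backup.zip", erase=True)
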
Example #3
File: __init__.py Project: FatPuffer/CTFD
def create_app(config='CTFd.config.Config'):
    app = CTFdFlask(__name__)
    with app.app_context():
        app.config.from_object(config)

        theme_loader = ThemeLoader(os.path.join(app.root_path, 'themes'),
                                   followlinks=True)
        app.jinja_loader = theme_loader

        from CTFd.models import (  # noqa: F401
            db, Teams, Solves, Challenges, Fails, Flags, Tags, Files, Tracking,
        )

        url = create_database()

        # This allows any changes to the SQLALCHEMY_DATABASE_URI to get pushed back in
        # This is mostly so we can force MySQL's charset
        app.config['SQLALCHEMY_DATABASE_URI'] = str(url)

        # Register database
        db.init_app(app)

        # Register Flask-Migrate
        migrations.init_app(app, db)

        # Alembic sqlite support is lacking so we should just create_all anyway
        if url.drivername.startswith('sqlite'):
            db.create_all()
            stamp()
        else:
            # This creates tables instead of db.create_all()
            # Allows migrations to happen properly
            upgrade()

        from CTFd.models import ma

        ma.init_app(app)

        app.db = db
        app.VERSION = __version__

        from CTFd.cache import cache

        cache.init_app(app)
        app.cache = cache

        # If you have multiple workers you must have a shared cache
        socketio.init_app(app,
                          async_mode=app.config.get('SOCKETIO_ASYNC_MODE'),
                          message_queue=app.config.get('CACHE_REDIS_URL'))

        if app.config.get('REVERSE_PROXY'):
            app.wsgi_app = ProxyFix(app.wsgi_app)

        version = utils.get_config('ctf_version')

        # Upgrading from an older version of CTFd
        if version and (StrictVersion(version) < StrictVersion(__version__)):
            if confirm_upgrade():
                run_upgrade()
            else:
                exit()

        if not version:
            utils.set_config('ctf_version', __version__)

        if not utils.get_config('ctf_theme'):
            utils.set_config('ctf_theme', 'core')

        update_check(force=True)

        init_request_processors(app)
        init_template_filters(app)
        init_template_globals(app)

        # Importing here allows tests to use sensible names (e.g. api instead of api_bp)
        from CTFd.views import views
        from CTFd.teams import teams
        from CTFd.users import users
        from CTFd.challenges import challenges
        from CTFd.scoreboard import scoreboard
        from CTFd.auth import auth
        from CTFd.admin import admin
        from CTFd.api import api
        from CTFd.events import events
        from CTFd.errors import page_not_found, forbidden, general_error, gateway_error

        app.register_blueprint(views)
        app.register_blueprint(teams)
        app.register_blueprint(users)
        app.register_blueprint(challenges)
        app.register_blueprint(scoreboard)
        app.register_blueprint(auth)
        app.register_blueprint(api)
        app.register_blueprint(events)

        app.register_blueprint(admin)

        app.register_error_handler(404, page_not_found)
        app.register_error_handler(403, forbidden)
        app.register_error_handler(500, general_error)
        app.register_error_handler(502, gateway_error)

        init_plugins(app)

        return app
Example #4
File: __init__.py Project: yakovk/CTFd
def create_app(config="CTFd.config.Config"):
    app = CTFdFlask(__name__)
    with app.app_context():
        app.config.from_object(config)

        app.theme_loader = ThemeLoader(os.path.join(app.root_path, "themes"),
                                       followlinks=True)
        # Weird nested solution for accessing plugin templates
        app.plugin_loader = jinja2.PrefixLoader({
            "plugins": jinja2.FileSystemLoader(
                searchpath=os.path.join(app.root_path, "plugins"),
                followlinks=True,
            )
        })
        # Load from themes first but fallback to loading from the plugin folder
        app.jinja_loader = jinja2.ChoiceLoader(
            [app.theme_loader, app.plugin_loader])

        from CTFd.models import (  # noqa: F401
            db, Teams, Solves, Challenges, Fails, Flags, Tags, Files, Tracking,
        )

        url = create_database()

        # This allows any changes to the SQLALCHEMY_DATABASE_URI to get pushed back in
        # This is mostly so we can force MySQL's charset
        app.config["SQLALCHEMY_DATABASE_URI"] = str(url)

        # Register database
        db.init_app(app)

        # Register Flask-Migrate
        migrations.init_app(app, db)

        # Alembic sqlite support is lacking so we should just create_all anyway
        if url.drivername.startswith("sqlite"):
            # Enable foreign keys for SQLite. This must be before the
            # db.create_all call because tests use the in-memory SQLite
            # database (each connection, including db creation, is a new db).
            # https://docs.sqlalchemy.org/en/13/dialects/sqlite.html#foreign-key-support
            from sqlalchemy.engine import Engine
            from sqlalchemy import event

            @event.listens_for(Engine, "connect")
            def set_sqlite_pragma(dbapi_connection, connection_record):
                cursor = dbapi_connection.cursor()
                cursor.execute("PRAGMA foreign_keys=ON")
                cursor.close()

            db.create_all()
            stamp_latest_revision()
        else:
            # This creates tables instead of db.create_all()
            # Allows migrations to happen properly
            upgrade()

        from CTFd.models import ma

        ma.init_app(app)

        app.db = db
        app.VERSION = __version__
        app.CHANNEL = __channel__

        from CTFd.cache import cache

        cache.init_app(app)
        app.cache = cache

        reverse_proxy = app.config.get("REVERSE_PROXY")
        if reverse_proxy:
            if isinstance(reverse_proxy, str) and "," in reverse_proxy:
                proxyfix_args = [int(i) for i in reverse_proxy.split(",")]
                app.wsgi_app = ProxyFix(app.wsgi_app, *proxyfix_args)
            else:
                app.wsgi_app = ProxyFix(app.wsgi_app,
                                        x_for=1,
                                        x_proto=1,
                                        x_host=1,
                                        x_port=1,
                                        x_prefix=1)

        version = utils.get_config("ctf_version")

        # Upgrading from an older version of CTFd
        if version and (StrictVersion(version) < StrictVersion(__version__)):
            if confirm_upgrade():
                run_upgrade()
            else:
                exit()

        if not version:
            utils.set_config("ctf_version", __version__)

        if not utils.get_config("ctf_theme"):
            utils.set_config("ctf_theme", "core")

        update_check(force=True)

        init_request_processors(app)
        init_template_filters(app)
        init_template_globals(app)

        # Importing here allows tests to use sensible names (e.g. api instead of api_bp)
        from CTFd.views import views
        from CTFd.teams import teams
        from CTFd.users import users
        from CTFd.challenges import challenges
        from CTFd.scoreboard import scoreboard
        from CTFd.auth import auth
        from CTFd.admin import admin
        from CTFd.api import api
        from CTFd.events import events
        from CTFd.errors import page_not_found, forbidden, general_error, gateway_error

        app.register_blueprint(views)
        app.register_blueprint(teams)
        app.register_blueprint(users)
        app.register_blueprint(challenges)
        app.register_blueprint(scoreboard)
        app.register_blueprint(auth)
        app.register_blueprint(api)
        app.register_blueprint(events)

        app.register_blueprint(admin)

        app.register_error_handler(404, page_not_found)
        app.register_error_handler(403, forbidden)
        app.register_error_handler(500, general_error)
        app.register_error_handler(502, gateway_error)

        init_logs(app)
        init_events(app)
        init_plugins(app)

        return app
Example #5
def create_app(config="CTFd.config.Config"):
    app = CTFdFlask(__name__)
    with app.app_context():
        app.config.from_object(config)

        theme_loader = ThemeLoader(os.path.join(app.root_path, "themes"),
                                   followlinks=True)
        app.jinja_loader = theme_loader

        from CTFd.models import (  # noqa: F401
            db, Teams, Solves, Challenges, Fails, Flags, Tags, Files, Tracking,
        )

        url = create_database()

        # This allows any changes to the SQLALCHEMY_DATABASE_URI to get pushed back in
        # This is mostly so we can force MySQL's charset
        app.config["SQLALCHEMY_DATABASE_URI"] = str(url)

        # Register database
        db.init_app(app)

        # Register Flask-Migrate
        migrations.init_app(app, db)

        # Alembic sqlite support is lacking so we should just create_all anyway
        if url.drivername.startswith("sqlite"):
            db.create_all()
            stamp_latest_revision()
        else:
            # This creates tables instead of db.create_all()
            # Allows migrations to happen properly
            upgrade()

        from CTFd.models import ma

        ma.init_app(app)

        app.db = db
        app.VERSION = __version__

        from CTFd.cache import cache

        cache.init_app(app)
        app.cache = cache

        reverse_proxy = app.config.get("REVERSE_PROXY")
        if reverse_proxy:
            # Guard with isinstance: REVERSE_PROXY may be a boolean, and
            # `"," in True` would raise a TypeError
            if isinstance(reverse_proxy, str) and "," in reverse_proxy:
                proxyfix_args = [int(i) for i in reverse_proxy.split(",")]
                # The second positional argument is Werkzeug 0.15's deprecated
                # num_proxies parameter; None defers to the x_* counts
                app.wsgi_app = ProxyFix(app.wsgi_app, None, *proxyfix_args)
            else:
                app.wsgi_app = ProxyFix(
                    app.wsgi_app,
                    num_proxies=None,
                    x_for=1,
                    x_proto=1,
                    x_host=1,
                    x_port=1,
                    x_prefix=1,
                )

        version = utils.get_config("ctf_version")

        # Upgrading from an older version of CTFd
        if version and (StrictVersion(version) < StrictVersion(__version__)):
            if confirm_upgrade():
                run_upgrade()
            else:
                exit()

        if not version:
            utils.set_config("ctf_version", __version__)

        if not utils.get_config("ctf_theme"):
            utils.set_config("ctf_theme", "core")

        update_check(force=True)

        init_request_processors(app)
        init_template_filters(app)
        init_template_globals(app)

        # Importing here allows tests to use sensible names (e.g. api instead of api_bp)
        from CTFd.views import views
        from CTFd.teams import teams
        from CTFd.users import users
        from CTFd.challenges import challenges
        from CTFd.scoreboard import scoreboard
        from CTFd.auth import auth
        from CTFd.admin import admin
        from CTFd.api import api
        from CTFd.events import events
        from CTFd.errors import page_not_found, forbidden, general_error, gateway_error

        app.register_blueprint(views)
        app.register_blueprint(teams)
        app.register_blueprint(users)
        app.register_blueprint(challenges)
        app.register_blueprint(scoreboard)
        app.register_blueprint(auth)
        app.register_blueprint(api)
        app.register_blueprint(events)

        app.register_blueprint(admin)

        app.register_error_handler(404, page_not_found)
        app.register_error_handler(403, forbidden)
        app.register_error_handler(500, general_error)
        app.register_error_handler(502, gateway_error)

        init_logs(app)
        init_events(app)
        init_plugins(app)

        return app
Example #6
def import_ctf(backup, erase=True):
    if not zipfile.is_zipfile(backup):
        raise zipfile.BadZipfile

    backup = zipfile.ZipFile(backup)

    members = backup.namelist()
    max_content_length = get_app_config("MAX_CONTENT_LENGTH")
    for f in members:
        if f.startswith("/") or ".." in f:
            # Abort on malicious zip files
            raise zipfile.BadZipfile
        info = backup.getinfo(f)
        if max_content_length:
            if info.file_size > max_content_length:
                raise zipfile.LargeZipFile

    # Get list of directories in zipfile
    member_dirs = [os.path.split(m)[0] for m in members if "/" in m]
    if "db" not in member_dirs:
        raise Exception(
            'CTFd couldn\'t find the "db" folder in this backup. '
            "The backup may be malformed or corrupted and the import process cannot continue."
        )

    try:
        alembic_version = json.loads(
            backup.open("db/alembic_version.json").read())
        alembic_version = alembic_version["results"][0]["version_num"]
    except Exception:
        raise Exception(
            "Could not determine appropriate database version. This backup cannot be automatically imported."
        )

    # Check if the alembic version is from CTFd 1.x
    if alembic_version in (
            "1ec4a28fe0ff",
            "2539d8b5082e",
            "7e9efd084c5a",
            "87733981ca0e",
            "a4e30c94c360",
            "c12d2a1b0926",
            "c7225db614c1",
            "cb3cfcc47e2f",
            "cbf5620f8e15",
            "d5a224bf5862",
            "d6514ec92738",
            "dab615389702",
            "e62fd69bd417",
    ):
        raise Exception(
            "The version of CTFd that this backup is from is too old to be automatically imported."
        )

    sqlite = get_app_config("SQLALCHEMY_DATABASE_URI").startswith("sqlite")
    postgres = get_app_config("SQLALCHEMY_DATABASE_URI").startswith("postgres")
    mysql = get_app_config("SQLALCHEMY_DATABASE_URI").startswith("mysql")

    if erase:
        # Clear out existing connections to release any locks
        db.session.close()
        db.engine.dispose()

        # Kill sleeping processes on MySQL so we don't get a metadata lock
        # In my testing I didn't find that Postgres or SQLite needed the same treatment
        # Only run this when not in tests as we can't isolate the queries out
        # This is a very dirty hack. Don't try this at home kids.
        if mysql and get_app_config("TESTING", default=False) is False:
            url = make_url(get_app_config("SQLALCHEMY_DATABASE_URI"))
            r = db.session.execute("SHOW PROCESSLIST")
            processes = r.fetchall()
            for proc in processes:
                if (proc.Command == "Sleep" and proc.User == url.username
                        and proc.db == url.database):
                    proc_id = proc.Id
                    db.session.execute(f"KILL {proc_id}")

        # Drop database and recreate it to get to a clean state
        drop_database()
        create_database()
        # We explicitly do not want to upgrade or stamp here.
        # The import will have this information.

    side_db = dataset.connect(get_app_config("SQLALCHEMY_DATABASE_URI"))

    try:
        if postgres:
            side_db.query("SET session_replication_role=replica;")
        else:
            side_db.query("SET FOREIGN_KEY_CHECKS=0;")
    except Exception:
        print("Failed to disable foreign key checks. Continuing.")

    first = [
        "db/teams.json",
        "db/users.json",
        "db/challenges.json",
        "db/dynamic_challenge.json",
        "db/flags.json",
        "db/hints.json",
        "db/unlocks.json",
        "db/awards.json",
        "db/tags.json",
        "db/submissions.json",
        "db/solves.json",
        "db/files.json",
        "db/notifications.json",
        "db/pages.json",
        "db/tracking.json",
        "db/config.json",
    ]

    # We want to insert certain database tables first so we are specifying
    # the order with a list. The leftover tables are tables that are from a
    # plugin (more likely) or a table where we do not care about insertion order
    for item in first:
        if item in members:
            members.remove(item)

    # Upgrade the database to the point in time that the import was taken from
    migration_upgrade(revision=alembic_version)

    members.remove("db/alembic_version.json")

    # Combine the database insertion code into a function so that we can pause
    # insertion between official database tables and plugin tables
    def insertion(table_filenames):
        for member in table_filenames:
            if member.startswith("db/"):
                table_name = member[3:-5]

                try:
                    # Try to open a file but skip if it doesn't exist.
                    data = backup.open(member).read()
                except KeyError:
                    continue

                if data:
                    table = side_db[table_name]

                    saved = json.loads(data)
                    for entry in saved["results"]:
                        # This is a hack to get SQLite to properly accept datetime values from dataset
                        # See Issue #246
                        if sqlite:
                            direct_table = get_class_by_tablename(table.name)
                            for k, v in entry.items():
                                if isinstance(v, string_types):
                                    # We only want to apply this hack to columns that are expecting a datetime object
                                    try:
                                        is_dt_column = (
                                            type(getattr(direct_table, k).type)
                                            == sqltypes.DateTime
                                        )
                                    except AttributeError:
                                        is_dt_column = False

                                    # If the table is expecting a datetime, we should check if the string is one and convert it
                                    if is_dt_column:
                                        match = re.match(
                                            r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d", v
                                        )
                                        if match:
                                            entry[k] = datetime.datetime.strptime(
                                                v, "%Y-%m-%dT%H:%M:%S.%f"
                                            )
                                            continue
                                        match = re.match(
                                            r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}", v
                                        )
                                        if match:
                                            entry[k] = datetime.datetime.strptime(
                                                v, "%Y-%m-%dT%H:%M:%S"
                                            )
                                            continue
                        # From v2.0.0 to v2.1.0 requirements could have been a string or JSON because of a SQLAlchemy issue
                        # This is a hack to ensure we can still accept older exports. See #867
                        if member in (
                                "db/challenges.json",
                                "db/hints.json",
                                "db/awards.json",
                        ):
                            requirements = entry.get("requirements")
                            if requirements and isinstance(requirements, string_types):
                                entry["requirements"] = json.loads(requirements)

                        try:
                            table.insert(entry)
                        except ProgrammingError:
                            # MariaDB does not like JSON objects and prefers strings because it internally
                            # represents JSON with LONGTEXT.
                            # See Issue #973
                            requirements = entry.get("requirements")
                            if requirements and isinstance(requirements, dict):
                                entry["requirements"] = json.dumps(requirements)
                            table.insert(entry)

                        db.session.commit()
                    if postgres:
                        # This command is to set the next primary key ID for the re-inserted tables in Postgres. However,
                        # this command is very difficult to translate into SQLAlchemy code. Because Postgres is not
                        # officially supported, no major work will go into this functionality.
                        # https://stackoverflow.com/a/37972960
                        if '"' not in table_name and "'" not in table_name:
                            query = "SELECT setval(pg_get_serial_sequence('{table_name}', 'id'), coalesce(max(id)+1,1), false) FROM \"{table_name}\"".format(  # nosec
                                table_name=table_name)
                            side_db.engine.execute(query)
                        else:
                            raise Exception(
                                "Table name {table_name} contains quotes".format(
                                    table_name=table_name))

    # Insert data from official tables
    insertion(first)

    # Create tables created by plugins
    # Run plugin migrations
    plugins = get_plugin_names()
    for plugin in plugins:
        revision = plugin_current(plugin_name=plugin)
        plugin_upgrade(plugin_name=plugin, revision=revision, lower=None)

    # Insert data for plugin tables
    insertion(members)

    # Bring plugin tables up to head revision
    plugins = get_plugin_names()
    for plugin in plugins:
        plugin_upgrade(plugin_name=plugin)

    # Extracting files
    files = [f for f in backup.namelist() if f.startswith("uploads/")]
    uploader = get_uploader()
    for f in files:
        # Zip member names always use forward slashes, regardless of platform
        filename = f.split("/", 1)

        # Skip an empty uploads directory (e.g. uploads/) and any bare directory entries
        if len(filename) < 2 or os.path.basename(filename[1]) == "":
            continue

        # Get the second entry in the list (the actual filename)
        filename = filename[1]
        source = backup.open(f)
        uploader.store(fileobj=source, filename=filename)

    # Alembic sqlite support is lacking so we should just create_all anyway
    if sqlite:
        app.db.create_all()
        stamp_latest_revision()
    else:
        # Run migrations to bring to latest version
        migration_upgrade(revision="head")
        # Create any leftover tables, perhaps from old plugins
        app.db.create_all()

    try:
        if postgres:
            side_db.query("SET session_replication_role=DEFAULT;")
        else:
            side_db.query("SET FOREIGN_KEY_CHECKS=1;")
    except Exception:
        print("Failed to enable foreign key checks. Continuing.")

    # Invalidate all cached data
    cache.clear()

    # Set default theme in case the current instance or the import does not provide it
    set_config("ctf_theme", "core")
    set_config("ctf_version", CTFD_VERSION)
Example #7
def import_ctf(backup, erase=True):
    if not zipfile.is_zipfile(backup):
        raise zipfile.BadZipfile

    if erase:
        drop_database()
        create_database()
        # We explicitly do not want to upgrade or stamp here.
        # The import will have this information.

    side_db = dataset.connect(get_app_config('SQLALCHEMY_DATABASE_URI'))
    sqlite = get_app_config('SQLALCHEMY_DATABASE_URI').startswith('sqlite')
    postgres = get_app_config('SQLALCHEMY_DATABASE_URI').startswith('postgres')

    backup = zipfile.ZipFile(backup)

    members = backup.namelist()
    max_content_length = get_app_config('MAX_CONTENT_LENGTH')
    for f in members:
        if f.startswith('/') or '..' in f:
            # Abort on malicious zip files
            raise zipfile.BadZipfile
        info = backup.getinfo(f)
        if max_content_length:
            if info.file_size > max_content_length:
                raise zipfile.LargeZipFile

    first = [
        'db/teams.json',
        'db/users.json',
        'db/challenges.json',
        'db/dynamic_challenge.json',

        'db/flags.json',
        'db/hints.json',
        'db/unlocks.json',
        'db/awards.json',
        'db/tags.json',

        'db/submissions.json',
        'db/solves.json',

        'db/files.json',

        'db/notifications.json',
        'db/pages.json',

        'db/tracking.json',
        'db/config.json',
    ]

    for item in first:
        if item in members:
            members.remove(item)

    members = first + members

    alembic_version = json.loads(
        backup.open('db/alembic_version.json').read()
    )["results"][0]["version_num"]
    upgrade(revision=alembic_version)

    # Create tables created by plugins
    try:
        app.db.create_all()
    except OperationalError as e:
        if not postgres:
            raise e
        else:
            print("Allowing error during app.db.create_all() due to Postgres")

    members.remove('db/alembic_version.json')

    for member in members:
        if member.startswith('db/'):
            table_name = member[3:-5]

            try:
                # Try to open a file but skip if it doesn't exist.
                data = backup.open(member).read()
            except KeyError:
                continue

            if data:
                table = side_db[table_name]

                saved = json.loads(data)
                for entry in saved['results']:
                    # This is a hack to get SQLite to properly accept datetime values from dataset
                    # See Issue #246
                    if sqlite:
                        for k, v in entry.items():
                            if isinstance(v, six.string_types):
                                match = re.match(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d", v)
                                if match:
                                    entry[k] = datetime.datetime.strptime(v, '%Y-%m-%dT%H:%M:%S.%f')
                                    continue
                                match = re.match(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}", v)
                                if match:
                                    entry[k] = datetime.datetime.strptime(v, '%Y-%m-%dT%H:%M:%S')
                                    continue
                    # From v2.0.0 to v2.1.0 requirements could have been a string or JSON because of a SQLAlchemy issue
                    # This is a hack to ensure we can still accept older exports. See #867
                    if member in ('db/challenges.json', 'db/hints.json', 'db/awards.json'):
                        requirements = entry.get('requirements')
                        if requirements and isinstance(requirements, six.string_types):
                            entry['requirements'] = json.loads(requirements)

                    try:
                        table.insert(entry)
                    except ProgrammingError:
                        # MariaDB does not like JSON objects and prefers strings because it internally
                        # represents JSON with LONGTEXT.
                        # See Issue #973
                        requirements = entry.get('requirements')
                        if requirements and isinstance(requirements, dict):
                            entry['requirements'] = json.dumps(requirements)
                        table.insert(entry)

                    db.session.commit()
                if postgres:
                    # This command is to set the next primary key ID for the re-inserted tables in Postgres. However,
                    # this command is very difficult to translate into SQLAlchemy code. Because Postgres is not
                    # officially supported, no major work will go into this functionality.
                    # https://stackoverflow.com/a/37972960
                    if '"' not in table_name and '\'' not in table_name:
                        query = "SELECT setval(pg_get_serial_sequence('{table_name}', 'id'), coalesce(max(id)+1,1), false) FROM \"{table_name}\"".format(  # nosec
                            table_name=table_name
                        )
                        side_db.engine.execute(query)
                    else:
                        raise Exception('Table name {table_name} contains quotes'.format(table_name=table_name))

    # Extracting files
    files = [f for f in backup.namelist() if f.startswith('uploads/')]
    uploader = get_uploader()
    for f in files:
        # Zip member names always use forward slashes, regardless of platform
        filename = f.split('/', 1)

        if len(filename) < 2:  # just an empty uploads directory (e.g. uploads/)
            continue

        filename = filename[1]  # Get the second entry in the list (the actual filename)
        source = backup.open(f)
        uploader.store(fileobj=source, filename=filename)

    # Alembic sqlite support is lacking so we should just create_all anyway
    try:
        upgrade(revision='head')
    except (CommandError, RuntimeError, SystemExit):
        app.db.create_all()
        stamp()

    # Invalidate all cached data
    cache.clear()

    # Set default theme in case the current instance or the import does not provide it
    set_config('ctf_theme', 'core')
Example #8
File: __init__.py Project: shareef12/CTFd
def import_ctf(backup, erase=True):
    if not zipfile.is_zipfile(backup):
        raise zipfile.BadZipfile

    if erase:
        drop_database()
        create_database()
        # We explicitly do not want to upgrade or stamp here.
        # The import will have this information.

    side_db = dataset.connect(get_app_config('SQLALCHEMY_DATABASE_URI'))
    sqlite = get_app_config('SQLALCHEMY_DATABASE_URI').startswith('sqlite')
    postgres = get_app_config('SQLALCHEMY_DATABASE_URI').startswith('postgres')

    backup = zipfile.ZipFile(backup)

    members = backup.namelist()
    max_content_length = get_app_config('MAX_CONTENT_LENGTH')
    for f in members:
        if f.startswith('/') or '..' in f:
            # Abort on malicious zip files
            raise zipfile.BadZipfile
        info = backup.getinfo(f)
        if max_content_length:
            if info.file_size > max_content_length:
                raise zipfile.LargeZipFile

    first = [
        'db/teams.json',
        'db/users.json',
        'db/challenges.json',
        'db/dynamic_challenge.json',
        'db/flags.json',
        'db/hints.json',
        'db/unlocks.json',
        'db/awards.json',
        'db/tags.json',
        'db/submissions.json',
        'db/solves.json',
        'db/files.json',
        'db/notifications.json',
        'db/pages.json',
        'db/tracking.json',
        'db/config.json',
    ]

    for item in first:
        if item in members:
            members.remove(item)

    members = first + members

    alembic_version = json.loads(
        backup.open('db/alembic_version.json').read()
    )["results"][0]["version_num"]
    upgrade(revision=alembic_version)
    members.remove('db/alembic_version.json')

    for member in members:
        if member.startswith('db/'):
            table_name = member[3:-5]

            try:
                # Try to open a file but skip if it doesn't exist.
                data = backup.open(member).read()
            except KeyError:
                continue

            if data:
                table = side_db[table_name]

                saved = json.loads(data)
                for entry in saved['results']:
                    # This is a hack to get SQLite to properly accept datetime values from dataset
                    # See Issue #246
                    if sqlite:
                        for k, v in entry.items():
                            if isinstance(v, six.string_types):
                                match = re.match(
                                    r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d",
                                    v)
                                if match:
                                    entry[k] = datetime.datetime.strptime(
                                        v, '%Y-%m-%dT%H:%M:%S.%f')
                                    continue
                                match = re.match(
                                    r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}", v)
                                if match:
                                    entry[k] = datetime.datetime.strptime(
                                        v, '%Y-%m-%dT%H:%M:%S')
                                    continue
                    table.insert(entry)
                    db.session.commit()
                if postgres:
                    # TODO: This should be sanitized even though exports are basically SQL dumps
                    # Databases are so hard
                    # https://stackoverflow.com/a/37972960
                    side_db.engine.execute(
                        "SELECT setval(pg_get_serial_sequence('{table_name}', 'id'), coalesce(max(id)+1,1), false) FROM {table_name}"
                        .format(table_name=table_name))

    # Extracting files
    files = [f for f in backup.namelist() if f.startswith('uploads/')]
    uploader = get_uploader()
    for f in files:
        # Zip member names always use forward slashes, regardless of platform
        filename = f.split('/', 1)

        if len(filename) < 2:  # just an empty uploads directory (e.g. uploads/)
            continue

        # Get the second entry in the list (the actual filename)
        filename = filename[1]
        source = backup.open(f)
        uploader.store(fileobj=source, filename=filename)

    cache.clear()