Code example #1
 def delete(self, challenge_id):
     uploader = get_uploader()
     files = JudgeCaseFiles.query.filter_by(challenge_id=challenge_id).all()
     for f in files:
         uploader.delete(f.location)
     JudgeCaseFiles.query.filter_by(challenge_id=challenge_id).delete()
     db.session.commit()
     db.session.close()
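
Note: every example on this page obtains a storage backend via get_uploader() and then calls a small, shared uploader interface. Below is a minimal sketch of that interface, inferred from the calls in these examples; the method names and signatures are assumptions for illustration, not the canonical CTFd classes.

class BaseUploader:
    """Storage backend interface assumed by the examples on this page."""

    def store(self, fileobj, filename):
        # Persist a file object under an existing name (used by import_ctf)
        raise NotImplementedError

    def upload(self, file_obj, filename):
        # Save a new upload and return its storage location
        raise NotImplementedError

    def download(self, filename):
        # Return a response that serves the stored file
        raise NotImplementedError

    def delete(self, filename):
        # Remove the stored file
        raise NotImplementedError

    def sync(self):
        # Mirror remote storage (e.g. S3) into the local upload folder
        raise NotImplementedError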
Code example #2
def files(path):
    """
    Route in charge of ensuring that CTF challenge files are only accessible during the competition.
    :param path:
    :return:
    """
    f = Files.query.filter_by(location=path).first_or_404()
    if f.type == 'challenge':
        if challenges_visible():
            if current_user.is_admin() is False:
                if not ctftime():
                    abort(403)
        else:
            if not ctftime():
                abort(403)

            # Allow downloads if a valid token is provided
            token = request.args.get('token', '')
            try:
                data = unserialize(token, max_age=3600)
                user_id = data.get('user_id')
                team_id = data.get('team_id')
                file_id = data.get('file_id')
                user = Users.query.filter_by(id=user_id).first()
                team = Teams.query.filter_by(id=team_id).first()

                # Check user is admin if challenge_visibility is admins only
                if (get_config('challenge_visibility') == 'admins'
                        and user.type != 'admin'):
                    abort(403)

                # Check that the user exists and isn't banned
                if user:
                    if user.banned:
                        abort(403)
                else:
                    abort(403)

                # Check that the team isn't banned
                if team:
                    if team.banned:
                        abort(403)
                else:
                    pass

                # Check that the token properly refers to the file
                if file_id != f.id:
                    abort(403)

            # Reject the download if the token is expired or malformed
            except (BadTimeSignature, SignatureExpired, BadSignature):
                abort(403)

    uploader = get_uploader()
    try:
        return uploader.download(f.location)
    except IOError:
        abort(404)
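
The token check above relies on CTFd's serialize()/unserialize() helpers, which wrap itsdangerous timed signing. A hypothetical sketch of how such a download token could be minted; the helper name make_download_token and the hard-coded secret are illustrative, but the payload keys mirror what the route reads back out:

from itsdangerous import URLSafeTimedSerializer

serializer = URLSafeTimedSerializer('SECRET_KEY')  # the app's secret key

def make_download_token(user_id, team_id, file_id):
    # Payload keys match what files() extracts via data.get(...)
    return serializer.dumps(
        {'user_id': user_id, 'team_id': team_id, 'file_id': file_id})

# unserialize(token, max_age=3600) is then roughly equivalent to
# serializer.loads(token, max_age=3600), which raises BadTimeSignature,
# SignatureExpired, or BadSignature on a stale or tampered token.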
Code example #3
File: __init__.py Project: wicakhendrik/CTFd
def export_ctf():
    # TODO: For some unknown reason, dataset is only able to see alembic_version
    # during tests, even when using a real SQLite database, which makes this
    # test impossible to pass under SQLite.
    db = dataset.connect(get_app_config("SQLALCHEMY_DATABASE_URI"))

    # Backup database
    backup = tempfile.NamedTemporaryFile()

    backup_zip = zipfile.ZipFile(backup, "w")

    tables = db.tables
    for table in tables:
        result = db[table].all()
        result_file = six.BytesIO()
        datafreeze.freeze(result, format="ctfd", fileobj=result_file)
        result_file.seek(0)
        backup_zip.writestr("db/{}.json".format(table), result_file.read())

    # Guarantee that alembic_version is saved into the export
    if "alembic_version" not in tables:
        result = {
            "count": 1,
            "results": [{
                "version_num": get_current_revision()
            }],
            "meta": {},
        }
        result_file = six.BytesIO()
        # json.dump writes str, which a BytesIO rejects under Python 3
        result_file.write(json.dumps(result).encode("utf-8"))
        result_file.seek(0)
        backup_zip.writestr("db/alembic_version.json", result_file.read())

    # Backup uploads
    uploader = get_uploader()
    uploader.sync()

    upload_folder = os.path.join(os.path.normpath(app.root_path),
                                 app.config.get("UPLOAD_FOLDER"))
    for root, dirs, files in os.walk(upload_folder):
        for file in files:
            parent_dir = os.path.basename(root)
            backup_zip.write(
                os.path.join(root, file),
                arcname=os.path.join("uploads", parent_dir, file),
            )

    backup_zip.close()
    backup.seek(0)
    return backup
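
export_ctf() returns the open NamedTemporaryFile seeked back to the start, so the caller is responsible for streaming or persisting it. A minimal usage sketch, assuming an active application context; the output file name is illustrative:

backup = export_ctf()
with open("ctfd_backup.zip", "wb") as out:
    out.write(backup.read())
backup.close()  # the temporary file is removed on close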
Code example #4
 def post(self, challenge_id):
     files = request.files.getlist("file")
     objs = []
     uploader = get_uploader()
     for f in files:
         location = uploader.upload(file_obj=f, filename=f.filename)
         file_row = JudgeCaseFiles(
             challenge_id=challenge_id, location=location)
         db.session.add(file_row)
         db.session.commit()
         objs.append(file_row)
     schema = FileSchema(many=True)
     response = schema.dump(objs)
     if response.errors:
         return {"success": False, "errors": response.errorss}, 400
     return {"success": True, "data": response.data}
Code example #5
File: views.py Project: shareef12/CTFd
def files(path):
    """
    Route in charge of ensuring that CTF challenge files are only accessible during the competition.
    :param path:
    :return:
    """
    f = Files.query.filter_by(location=path).first_or_404()
    if f.type == 'challenge':
        if current_user.authed():
            if current_user.is_admin() is False:
                if not ctftime():
                    abort(403)
        else:
            abort(403)

    uploader = get_uploader()
    try:
        return uploader.download(f.location)
    except IOError:
        abort(404)
Code example #6
    def delete(challenge):
        Fails.query.filter_by(challenge_id=challenge.id).delete()
        Solves.query.filter_by(challenge_id=challenge.id).delete()
        Flags.query.filter_by(challenge_id=challenge.id).delete()

        files = ChallengeFiles.query.filter_by(challenge_id=challenge.id).all()
        for f in files:
            delete_file(f.id)
        ChallengeFiles.query.filter_by(challenge_id=challenge.id).delete()
        uploader = get_uploader()
        files = JudgeCaseFiles.query.filter_by(challenge_id=challenge.id).all()
        for f in files:
            uploader.delete(f.location)
        JudgeCaseFiles.query.filter_by(challenge_id=challenge.id).delete()

        Tags.query.filter_by(challenge_id=challenge.id).delete()
        Hints.query.filter_by(challenge_id=challenge.id).delete()
        DynICPCModel.query.filter_by(id=challenge.id).delete()
        Challenges.query.filter_by(id=challenge.id).delete()
        db.session.commit()
Code example #7
File: __init__.py Project: wicakhendrik/CTFd
def import_ctf(backup, erase=True):
    if not zipfile.is_zipfile(backup):
        raise zipfile.BadZipfile

    backup = zipfile.ZipFile(backup)

    members = backup.namelist()
    max_content_length = get_app_config("MAX_CONTENT_LENGTH")
    for f in members:
        if f.startswith("/") or ".." in f:
            # Abort on malicious zip files
            raise zipfile.BadZipfile
        info = backup.getinfo(f)
        if max_content_length:
            if info.file_size > max_content_length:
                raise zipfile.LargeZipFile

    try:
        alembic_version = json.loads(
            backup.open("db/alembic_version.json").read())
        alembic_version = alembic_version["results"][0]["version_num"]
    except Exception:
        raise Exception(
            "Could not determine appropriate database version. This backup cannot be automatically imported."
        )

    # Check if the alembic version is from CTFd 1.x
    if alembic_version in (
            "1ec4a28fe0ff",
            "2539d8b5082e",
            "7e9efd084c5a",
            "87733981ca0e",
            "a4e30c94c360",
            "c12d2a1b0926",
            "c7225db614c1",
            "cb3cfcc47e2f",
            "cbf5620f8e15",
            "d5a224bf5862",
            "d6514ec92738",
            "dab615389702",
            "e62fd69bd417",
    ):
        raise Exception(
            "The version of CTFd that this backup is from is too old to be automatically imported."
        )

    if erase:
        drop_database()
        create_database()
        # We explicitly do not want to upgrade or stamp here.
        # The import will have this information.

    side_db = dataset.connect(get_app_config("SQLALCHEMY_DATABASE_URI"))
    sqlite = get_app_config("SQLALCHEMY_DATABASE_URI").startswith("sqlite")
    postgres = get_app_config("SQLALCHEMY_DATABASE_URI").startswith("postgres")

    try:
        if postgres:
            side_db.query("SET session_replication_role=replica;")
        else:
            side_db.query("SET FOREIGN_KEY_CHECKS=0;")
    except Exception:
        print("Failed to disable foreign key checks. Continuing.")

    first = [
        "db/teams.json",
        "db/users.json",
        "db/challenges.json",
        "db/dynamic_challenge.json",
        "db/flags.json",
        "db/hints.json",
        "db/unlocks.json",
        "db/awards.json",
        "db/tags.json",
        "db/submissions.json",
        "db/solves.json",
        "db/files.json",
        "db/notifications.json",
        "db/pages.json",
        "db/tracking.json",
        "db/config.json",
    ]

    for item in first:
        if item in members:
            members.remove(item)

    members = first + members

    upgrade(revision=alembic_version)

    # Create tables created by plugins
    try:
        app.db.create_all()
    except OperationalError as e:
        if not postgres:
            raise e
        else:
            print("Allowing error during app.db.create_all() due to Postgres")

    members.remove("db/alembic_version.json")

    for member in members:
        if member.startswith("db/"):
            table_name = member[3:-5]

            try:
                # Try to open a file but skip if it doesn't exist.
                data = backup.open(member).read()
            except KeyError:
                continue

            if data:
                table = side_db[table_name]

                saved = json.loads(data)
                for entry in saved["results"]:
                    # This is a hack to get SQLite to properly accept datetime values from dataset
                    # See Issue #246
                    if sqlite:
                        for k, v in entry.items():
                            if isinstance(v, six.string_types):
                                match = re.match(
                                    r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d",
                                    v)
                                if match:
                                    entry[k] = datetime.datetime.strptime(
                                        v, "%Y-%m-%dT%H:%M:%S.%f")
                                    continue
                                match = re.match(
                                    r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}", v)
                                if match:
                                    entry[k] = datetime.datetime.strptime(
                                        v, "%Y-%m-%dT%H:%M:%S")
                                    continue
                    # From v2.0.0 to v2.1.0 requirements could have been a string or JSON because of a SQLAlchemy issue
                    # This is a hack to ensure we can still accept older exports. See #867
                    if member in (
                            "db/challenges.json",
                            "db/hints.json",
                            "db/awards.json",
                    ):
                        requirements = entry.get("requirements")
                        if requirements and isinstance(requirements,
                                                       six.string_types):
                            entry["requirements"] = json.loads(requirements)

                    try:
                        table.insert(entry)
                    except ProgrammingError:
                        # MariaDB does not like JSON objects and prefers strings because it internally
                        # represents JSON with LONGTEXT.
                        # See Issue #973
                        requirements = entry.get("requirements")
                        if requirements and isinstance(requirements, dict):
                            entry["requirements"] = json.dumps(requirements)
                        table.insert(entry)

                    db.session.commit()
                if postgres:
                    # This command is to set the next primary key ID for the re-inserted tables in Postgres. However,
                    # this command is very difficult to translate into SQLAlchemy code. Because Postgres is not
                    # officially supported, no major work will go into this functionality.
                    # https://stackoverflow.com/a/37972960
                    if '"' not in table_name and "'" not in table_name:
                        query = "SELECT setval(pg_get_serial_sequence('{table_name}', 'id'), coalesce(max(id)+1,1), false) FROM \"{table_name}\"".format(  # nosec
                            table_name=table_name)
                        side_db.engine.execute(query)
                    else:
                        raise Exception(
                            "Table name {table_name} contains quotes".format(
                                table_name=table_name))

    # Extracting files
    files = [f for f in backup.namelist() if f.startswith("uploads/")]
    uploader = get_uploader()
    for f in files:
        filename = f.split(os.sep, 1)

        if len(filename) < 2:  # just an empty uploads directory (e.g. uploads/)
            continue

        filename = filename[1]  # Get the second entry in the list (the actual filename)
        source = backup.open(f)
        uploader.store(fileobj=source, filename=filename)

    # Alembic sqlite support is lacking so we should just create_all anyway
    try:
        upgrade(revision="head")
    except (OperationalError, CommandError, RuntimeError, SystemExit,
            Exception):
        app.db.create_all()
        stamp_latest_revision()

    try:
        if postgres:
            side_db.query("SET session_replication_role=DEFAULT;")
        else:
            side_db.query("SET FOREIGN_KEY_CHECKS=1;")
    except Exception:
        print("Failed to enable foreign key checks. Continuing.")

    # Invalidate all cached data
    cache.clear()

    # Set default theme in case the current instance or the import does not provide it
    set_config("ctf_theme", "core")
Code example #8
def import_ctf(backup, erase=True):
    if not zipfile.is_zipfile(backup):
        raise zipfile.BadZipfile

    backup = zipfile.ZipFile(backup)

    members = backup.namelist()
    max_content_length = get_app_config("MAX_CONTENT_LENGTH")
    for f in members:
        if f.startswith("/") or ".." in f:
            # Abort on malicious zip files
            raise zipfile.BadZipfile
        info = backup.getinfo(f)
        if max_content_length:
            if info.file_size > max_content_length:
                raise zipfile.LargeZipFile

    # Get list of directories in zipfile
    member_dirs = [os.path.split(m)[0] for m in members if "/" in m]
    if "db" not in member_dirs:
        raise Exception(
            'CTFd couldn\'t find the "db" folder in this backup. '
            "The backup may be malformed or corrupted and the import process cannot continue."
        )

    try:
        alembic_version = json.loads(
            backup.open("db/alembic_version.json").read())
        alembic_version = alembic_version["results"][0]["version_num"]
    except Exception:
        raise Exception(
            "Could not determine appropriate database version. This backup cannot be automatically imported."
        )

    # Check if the alembic version is from CTFd 1.x
    if alembic_version in (
            "1ec4a28fe0ff",
            "2539d8b5082e",
            "7e9efd084c5a",
            "87733981ca0e",
            "a4e30c94c360",
            "c12d2a1b0926",
            "c7225db614c1",
            "cb3cfcc47e2f",
            "cbf5620f8e15",
            "d5a224bf5862",
            "d6514ec92738",
            "dab615389702",
            "e62fd69bd417",
    ):
        raise Exception(
            "The version of CTFd that this backup is from is too old to be automatically imported."
        )

    sqlite = get_app_config("SQLALCHEMY_DATABASE_URI").startswith("sqlite")
    postgres = get_app_config("SQLALCHEMY_DATABASE_URI").startswith("postgres")
    mysql = get_app_config("SQLALCHEMY_DATABASE_URI").startswith("mysql")

    if erase:
        # Clear out existing connections to release any locks
        db.session.close()
        db.engine.dispose()

        # Kill sleeping processes on MySQL so we don't get a metadata lock
        # In my testing I didn't find that Postgres or SQLite needed the same treatment
        # Only run this when not in tests as we can't isolate the queries out
        # This is a very dirty hack. Don't try this at home kids.
        if mysql and get_app_config("TESTING", default=False) is False:
            url = make_url(get_app_config("SQLALCHEMY_DATABASE_URI"))
            r = db.session.execute("SHOW PROCESSLIST")
            processes = r.fetchall()
            for proc in processes:
                if (proc.Command == "Sleep" and proc.User == url.username
                        and proc.db == url.database):
                    proc_id = proc.Id
                    db.session.execute(f"KILL {proc_id}")

        # Drop database and recreate it to get to a clean state
        drop_database()
        create_database()
        # We explicitly do not want to upgrade or stamp here.
        # The import will have this information.

    side_db = dataset.connect(get_app_config("SQLALCHEMY_DATABASE_URI"))

    try:
        if postgres:
            side_db.query("SET session_replication_role=replica;")
        else:
            side_db.query("SET FOREIGN_KEY_CHECKS=0;")
    except Exception:
        print("Failed to disable foreign key checks. Continuing.")

    first = [
        "db/teams.json",
        "db/users.json",
        "db/challenges.json",
        "db/dynamic_challenge.json",
        "db/flags.json",
        "db/hints.json",
        "db/unlocks.json",
        "db/awards.json",
        "db/tags.json",
        "db/submissions.json",
        "db/solves.json",
        "db/files.json",
        "db/notifications.json",
        "db/pages.json",
        "db/tracking.json",
        "db/config.json",
    ]

    # We want to insert certain database tables first so we are specifying
    # the order with a list. The leftover tables are tables that are from a
    # plugin (more likely) or a table where we do not care about insertion order
    for item in first:
        if item in members:
            members.remove(item)

    # Upgrade the database to the point in time that the import was taken from
    migration_upgrade(revision=alembic_version)

    members.remove("db/alembic_version.json")

    # Combine the database insertion code into a function so that we can pause
    # insertion between official database tables and plugin tables
    def insertion(table_filenames):
        for member in table_filenames:
            if member.startswith("db/"):
                table_name = member[3:-5]

                try:
                    # Try to open a file but skip if it doesn't exist.
                    data = backup.open(member).read()
                except KeyError:
                    continue

                if data:
                    table = side_db[table_name]

                    saved = json.loads(data)
                    for entry in saved["results"]:
                        # This is a hack to get SQLite to properly accept datetime values from dataset
                        # See Issue #246
                        if sqlite:
                            direct_table = get_class_by_tablename(table.name)
                            for k, v in entry.items():
                                if isinstance(v, string_types):
                                    # We only want to apply this hack to columns that are expecting a datetime object
                                    try:
                                        is_dt_column = (
                                            type(getattr(direct_table, k).type)
                                            == sqltypes.DateTime)
                                    except AttributeError:
                                        is_dt_column = False

                                    # If the table is expecting a datetime, we should check if the string is one and convert it
                                    if is_dt_column:
                                        match = re.match(
                                            r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d",
                                            v,
                                        )
                                        if match:
                                            entry[k] = datetime.datetime.strptime(
                                                v, "%Y-%m-%dT%H:%M:%S.%f")
                                            continue
                                        match = re.match(
                                            r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}",
                                            v)
                                        if match:
                                            entry[k] = datetime.datetime.strptime(
                                                v, "%Y-%m-%dT%H:%M:%S")
                                            continue
                        # From v2.0.0 to v2.1.0 requirements could have been a string or JSON because of a SQLAlchemy issue
                        # This is a hack to ensure we can still accept older exports. See #867
                        if member in (
                                "db/challenges.json",
                                "db/hints.json",
                                "db/awards.json",
                        ):
                            requirements = entry.get("requirements")
                            if requirements and isinstance(
                                    requirements, string_types):
                                entry["requirements"] = json.loads(
                                    requirements)

                        try:
                            table.insert(entry)
                        except ProgrammingError:
                            # MariaDB does not like JSON objects and prefers strings because it internally
                            # represents JSON with LONGTEXT.
                            # See Issue #973
                            requirements = entry.get("requirements")
                            if requirements and isinstance(requirements, dict):
                                entry["requirements"] = json.dumps(
                                    requirements)
                            table.insert(entry)

                        db.session.commit()
                    if postgres:
                        # This command is to set the next primary key ID for the re-inserted tables in Postgres. However,
                        # this command is very difficult to translate into SQLAlchemy code. Because Postgres is not
                        # officially supported, no major work will go into this functionality.
                        # https://stackoverflow.com/a/37972960
                        if '"' not in table_name and "'" not in table_name:
                            query = "SELECT setval(pg_get_serial_sequence('{table_name}', 'id'), coalesce(max(id)+1,1), false) FROM \"{table_name}\"".format(  # nosec
                                table_name=table_name)
                            side_db.engine.execute(query)
                        else:
                            raise Exception(
                                "Table name {table_name} contains quotes".
                                format(table_name=table_name))

    # Insert data from official tables
    insertion(first)

    # Create tables created by plugins
    # Run plugin migrations
    plugins = get_plugin_names()
    for plugin in plugins:
        revision = plugin_current(plugin_name=plugin)
        plugin_upgrade(plugin_name=plugin, revision=revision, lower=None)

    # Insert data for plugin tables
    insertion(members)

    # Bring plugin tables up to head revision
    plugins = get_plugin_names()
    for plugin in plugins:
        plugin_upgrade(plugin_name=plugin)

    # Extracting files
    files = [f for f in backup.namelist() if f.startswith("uploads/")]
    uploader = get_uploader()
    for f in files:
        filename = f.split(os.sep, 1)

        # Skip an empty uploads directory (e.g. uploads/) or a bare directory entry
        if len(filename) < 2 or os.path.basename(filename[1]) == "":
            continue

        filename = filename[1]  # Get the second entry in the list (the actual filename)
        source = backup.open(f)
        uploader.store(fileobj=source, filename=filename)

    # Alembic sqlite support is lacking so we should just create_all anyway
    if sqlite:
        app.db.create_all()
        stamp_latest_revision()
    else:
        # Run migrations to bring to latest version
        migration_upgrade(revision="head")
        # Create any leftover tables, perhaps from old plugins
        app.db.create_all()

    try:
        if postgres:
            side_db.query("SET session_replication_role=DEFAULT;")
        else:
            side_db.query("SET FOREIGN_KEY_CHECKS=1;")
    except Exception:
        print("Failed to enable foreign key checks. Continuing.")

    # Invalidate all cached data
    cache.clear()

    # Set default theme in case the current instance or the import does not provide it
    set_config("ctf_theme", "core")
    set_config("ctf_version", CTFD_VERSION)
Code example #9
def files(path):
    """
    Route in charge of ensuring that CTF challenge files are only accessible during the competition.
    :param path:
    :return:
    """
    f = Files.query.filter_by(location=path).first_or_404()
    if f.type == "challenge":
        if challenges_visible():
            if current_user.is_admin() is False:
                if not ctftime():
                    if ctf_ended() and view_after_ctf():
                        pass
                    else:
                        abort(403)
        else:
            # User cannot view challenges based on challenge visibility
            # e.g. ctf requires registration but user isn't authed or
            # ctf requires admin account but user isn't admin
            if not ctftime():
                # It's not CTF time. The only edge case is if the CTF is ended
                # but we have view_after_ctf enabled
                if ctf_ended() and view_after_ctf():
                    pass
                else:
                    # In all other situations we should block challenge files
                    abort(403)

            # Allow downloads if a valid token is provided
            token = request.args.get("token", "")
            try:
                data = unserialize(token, max_age=3600)
                user_id = data.get("user_id")
                team_id = data.get("team_id")
                file_id = data.get("file_id")
                user = Users.query.filter_by(id=user_id).first()
                team = Teams.query.filter_by(id=team_id).first()

                # Check user is admin if challenge_visibility is admins only
                if (
                    get_config(ConfigTypes.CHALLENGE_VISIBILITY) == "admins"
                    and user.type != "admin"
                ):
                    abort(403)

                # Check that the user exists and isn't banned
                if user:
                    if user.banned:
                        abort(403)
                else:
                    abort(403)

                # Check that the team isn't banned
                if team:
                    if team.banned:
                        abort(403)
                else:
                    pass

                # Check that the token properly refers to the file
                if file_id != f.id:
                    abort(403)

            # Reject the download if the token is expired or malformed
            except (BadTimeSignature, SignatureExpired, BadSignature):
                abort(403)

    uploader = get_uploader()
    try:
        return uploader.download(f.location)
    except IOError:
        abort(404)
Code example #10
File: __init__.py Project: mrigank-9594/srhctf_
def import_ctf(backup, erase=True):
    if not zipfile.is_zipfile(backup):
        raise zipfile.BadZipfile

    if erase:
        drop_database()
        create_database()
        # We explicitly do not want to upgrade or stamp here.
        # The import will have this information.

    side_db = dataset.connect(get_app_config('SQLALCHEMY_DATABASE_URI'))
    sqlite = get_app_config('SQLALCHEMY_DATABASE_URI').startswith('sqlite')
    postgres = get_app_config('SQLALCHEMY_DATABASE_URI').startswith('postgres')

    backup = zipfile.ZipFile(backup)

    members = backup.namelist()
    max_content_length = get_app_config('MAX_CONTENT_LENGTH')
    for f in members:
        if f.startswith('/') or '..' in f:
            # Abort on malicious zip files
            raise zipfile.BadZipfile
        info = backup.getinfo(f)
        if max_content_length:
            if info.file_size > max_content_length:
                raise zipfile.LargeZipFile

    first = [
        'db/teams.json',
        'db/users.json',
        'db/challenges.json',
        'db/dynamic_challenge.json',

        'db/flags.json',
        'db/hints.json',
        'db/unlocks.json',
        'db/awards.json',
        'db/tags.json',

        'db/submissions.json',
        'db/solves.json',

        'db/files.json',

        'db/notifications.json',
        'db/pages.json',

        'db/tracking.json',
        'db/config.json',
    ]

    for item in first:
        if item in members:
            members.remove(item)

    members = first + members

    alembic_version = json.loads(backup.open('db/alembic_version.json').read())["results"][0]["version_num"]
    upgrade(revision=alembic_version)

    # Create tables created by plugins
    try:
        app.db.create_all()
    except OperationalError as e:
        if not postgres:
            raise e
        else:
            print("Allowing error during app.db.create_all() due to Postgres")

    members.remove('db/alembic_version.json')

    for member in members:
        if member.startswith('db/'):
            table_name = member[3:-5]

            try:
                # Try to open a file but skip if it doesn't exist.
                data = backup.open(member).read()
            except KeyError:
                continue

            if data:
                table = side_db[table_name]

                saved = json.loads(data)
                for entry in saved['results']:
                    # This is a hack to get SQLite to properly accept datetime values from dataset
                    # See Issue #246
                    if sqlite:
                        for k, v in entry.items():
                            if isinstance(v, six.string_types):
                                match = re.match(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d", v)
                                if match:
                                    entry[k] = datetime.datetime.strptime(v, '%Y-%m-%dT%H:%M:%S.%f')
                                    continue
                                match = re.match(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}", v)
                                if match:
                                    entry[k] = datetime.datetime.strptime(v, '%Y-%m-%dT%H:%M:%S')
                                    continue
                    # From v2.0.0 to v2.1.0 requirements could have been a string or JSON because of a SQLAlchemy issue
                    # This is a hack to ensure we can still accept older exports. See #867
                    if member in ('db/challenges.json', 'db/hints.json', 'db/awards.json'):
                        requirements = entry.get('requirements')
                        if requirements and isinstance(requirements, six.string_types):
                            entry['requirements'] = json.loads(requirements)

                    try:
                        table.insert(entry)
                    except ProgrammingError:
                        # MariaDB does not like JSON objects and prefers strings because it internally
                        # represents JSON with LONGTEXT.
                        # See Issue #973
                        requirements = entry.get('requirements')
                        if requirements and isinstance(requirements, dict):
                            entry['requirements'] = json.dumps(requirements)
                        table.insert(entry)

                    db.session.commit()
                if postgres:
                    # This command is to set the next primary key ID for the re-inserted tables in Postgres. However,
                    # this command is very difficult to translate into SQLAlchemy code. Because Postgres is not
                    # officially supported, no major work will go into this functionality.
                    # https://stackoverflow.com/a/37972960
                    if '"' not in table_name and '\'' not in table_name:
                        query = "SELECT setval(pg_get_serial_sequence('{table_name}', 'id'), coalesce(max(id)+1,1), false) FROM \"{table_name}\"".format(  # nosec
                            table_name=table_name
                        )
                        side_db.engine.execute(query)
                    else:
                        raise Exception('Table name {table_name} contains quotes'.format(table_name=table_name))

    # Extracting files
    files = [f for f in backup.namelist() if f.startswith('uploads/')]
    uploader = get_uploader()
    for f in files:
        filename = f.split(os.sep, 1)

        if len(filename) < 2:  # just an empty uploads directory (e.g. uploads/)
            continue

        filename = filename[1]  # Get the second entry in the list (the actual filename)
        source = backup.open(f)
        uploader.store(fileobj=source, filename=filename)

    # Alembic sqlite support is lacking so we should just create_all anyway
    try:
        upgrade(revision='head')
    except (CommandError, RuntimeError, SystemExit):
        app.db.create_all()
        stamp()

    # Invalidate all cached data
    cache.clear()

    # Set default theme in case the current instance or the import does not provide it
    set_config('ctf_theme', 'core')
Code example #11
File: __init__.py Project: shareef12/CTFd
def import_ctf(backup, erase=True):
    if not zipfile.is_zipfile(backup):
        raise zipfile.BadZipfile

    if erase:
        drop_database()
        create_database()
        # We explicitly do not want to upgrade or stamp here.
        # The import will have this information.

    side_db = dataset.connect(get_app_config('SQLALCHEMY_DATABASE_URI'))
    sqlite = get_app_config('SQLALCHEMY_DATABASE_URI').startswith('sqlite')
    postgres = get_app_config('SQLALCHEMY_DATABASE_URI').startswith('postgres')

    backup = zipfile.ZipFile(backup)

    members = backup.namelist()
    max_content_length = get_app_config('MAX_CONTENT_LENGTH')
    for f in members:
        if f.startswith('/') or '..' in f:
            # Abort on malicious zip files
            raise zipfile.BadZipfile
        info = backup.getinfo(f)
        if max_content_length:
            if info.file_size > max_content_length:
                raise zipfile.LargeZipFile

    first = [
        'db/teams.json',
        'db/users.json',
        'db/challenges.json',
        'db/dynamic_challenge.json',
        'db/flags.json',
        'db/hints.json',
        'db/unlocks.json',
        'db/awards.json',
        'db/tags.json',
        'db/submissions.json',
        'db/solves.json',
        'db/files.json',
        'db/notifications.json',
        'db/pages.json',
        'db/tracking.json',
        'db/config.json',
    ]

    for item in first:
        if item in members:
            members.remove(item)

    members = first + members

    alembic_version = json.loads(
        backup.open(
            'db/alembic_version.json').read())["results"][0]["version_num"]
    upgrade(revision=alembic_version)
    members.remove('db/alembic_version.json')

    for member in members:
        if member.startswith('db/'):
            table_name = member[3:-5]

            try:
                # Try to open a file but skip if it doesn't exist.
                data = backup.open(member).read()
            except KeyError:
                continue

            if data:
                table = side_db[table_name]

                saved = json.loads(data)
                for entry in saved['results']:
                    # This is a hack to get SQLite to properly accept datetime values from dataset
                    # See Issue #246
                    if sqlite:
                        for k, v in entry.items():
                            if isinstance(v, six.string_types):
                                match = re.match(
                                    r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d",
                                    v)
                                if match:
                                    entry[k] = datetime.datetime.strptime(
                                        v, '%Y-%m-%dT%H:%M:%S.%f')
                                    continue
                                match = re.match(
                                    r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}", v)
                                if match:
                                    entry[k] = datetime.datetime.strptime(
                                        v, '%Y-%m-%dT%H:%M:%S')
                                    continue
                    table.insert(entry)
                    db.session.commit()
                if postgres:
                    # TODO: This should be sanitized even though exports are basically SQL dumps
                    # Databases are so hard
                    # https://stackoverflow.com/a/37972960
                    side_db.engine.execute(
                        "SELECT setval(pg_get_serial_sequence('{table_name}', 'id'), coalesce(max(id)+1,1), false) FROM {table_name}"
                        .format(table_name=table_name))

    # Extracting files
    files = [f for f in backup.namelist() if f.startswith('uploads/')]
    uploader = get_uploader()
    for f in files:
        filename = f.split(os.sep, 1)

        if len(filename) < 2:  # just an empty uploads directory (e.g. uploads/)
            continue

        filename = filename[1]  # Get the second entry in the list (the actual filename)
        source = backup.open(f)
        uploader.store(fileobj=source, filename=filename)

    cache.clear()