Example #1
import os
import subprocess
import tempfile

# can_create_container, Containers, db, and the rmdir helper come from the
# surrounding CTFd application; they are assumed here, not defined.
def create_image(name, buildfile, files):
    if not can_create_container():
        return False
    folder = tempfile.mkdtemp(prefix='ctfd')
    # NamedTemporaryFile opens in binary mode by default, so buildfile is
    # expected to be bytes; encode it first if it arrives as text.
    tmpfile = tempfile.NamedTemporaryFile(dir=folder, delete=False)
    tmpfile.write(buildfile)
    tmpfile.close()

    for f in files:
        if f.filename.strip():
            filename = os.path.basename(f.filename)
            f.save(os.path.join(folder, filename))
    # repository name component must match "[a-z0-9](?:-*[a-z0-9])*(?:[._][a-z0-9](?:-*[a-z0-9])*)*"
    # docker build -f tmpfile.name -t name
    try:
        cmd = ['docker', 'build', '-f', tmpfile.name, '-t', name, folder]
        print(cmd)
        # check_call raises CalledProcessError on a non-zero exit status,
        # which the except clause below depends on; plain call() never raises it.
        subprocess.check_call(cmd)
        container = Containers(name, buildfile)
        db.session.add(container)
        db.session.commit()
        db.session.close()
        rmdir(folder)  # CTFd helper that cleans up the temporary build folder
        return True
    except subprocess.CalledProcessError:
        return False
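
The repository-name rule quoted in the comment inside create_image() can be checked before shelling out to docker build. A minimal sketch built only from that regex; IMAGE_COMPONENT_RE and is_valid_image_name are hypothetical names, not CTFd helpers:

import re

# The repository-name-component pattern quoted in create_image()'s comment.
IMAGE_COMPONENT_RE = re.compile(
    r"[a-z0-9](?:-*[a-z0-9])*(?:[._][a-z0-9](?:-*[a-z0-9])*)*")

def is_valid_image_name(name):
    # A repository name is one or more '/'-separated components, each of
    # which must match the component pattern in full.
    return all(IMAGE_COMPONENT_RE.fullmatch(part) is not None
               for part in name.split('/'))

print(is_valid_image_name('ctfd/web-100'))  # True
print(is_valid_image_name('CTFd'))          # False: uppercase is not allowed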
Example #2
import json
import subprocess

# Containers and db come from the surrounding CTFd application.
def import_image(name):
    try:
        info = json.loads(
            subprocess.check_output(
                ['docker', 'inspect', '--type=image', name]))
        print(info)
        container = Containers(name, "<none>")
        db.session.add(container)
        db.session.commit()
        db.session.close()
        return True
    except subprocess.CalledProcessError:
        return False
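
docker inspect writes a JSON array with one object per matched image and exits non-zero when the image does not exist, which is what the check_output/CalledProcessError pair above relies on. A standalone sketch of the same parsing, assuming a local alpine image is available:

import json
import subprocess

# Exits non-zero (so check_output raises CalledProcessError) if no such image.
out = subprocess.check_output(['docker', 'inspect', '--type=image', 'alpine'])
info = json.loads(out)  # a list with one dict per matched image
print(info[0]['Id'])    # e.g. 'sha256:...'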
Example #3
import datetime
import json
import os
import shutil
import zipfile

import dataset  # third-party; provides the side_db table wrapper
import six      # third-party; Python 2/3 compatibility shim

# app, db, get_config, set_config, Pages, and Containers come from the
# surrounding CTFd application.
def import_ctf(backup, segments=None, erase=False):
    side_db = dataset.connect(get_config('SQLALCHEMY_DATABASE_URI'))
    if segments is None:
        segments = ['challenges', 'teams', 'both', 'metadata']

    if not zipfile.is_zipfile(backup):
        raise TypeError("backup must be a zip file")

    backup = zipfile.ZipFile(backup)

    groups = {
        'challenges': [
            'challenges',
            'files',
            'tags',
            'keys',
            'hints',
        ],
        'teams': [
            'teams',
            'tracking',
            'awards',
        ],
        'both': [
            'solves',
            'wrong_keys',
            'unlocks',
        ],
        'metadata': [
            'alembic_version',
            'config',
            'pages',
            'containers',
        ]
    }

    # Need special handling of metadata
    if 'metadata' in segments:
        meta = groups['metadata']
        segments.remove('metadata')
        meta.remove('alembic_version')

        for item in meta:
            table = side_db[item]
            path = "db/{}.json".format(item)
            data = backup.open(path).read()

            # Some JSON files will be empty
            if data:
                if item == 'config':
                    saved = json.loads(data)
                    for entry in saved['results']:
                        key = entry['key']
                        value = entry['value']
                        set_config(key, value)

                elif item == 'pages':
                    saved = json.loads(data)
                    for entry in saved['results']:
                        route = entry['route']
                        html = entry['html']
                        page = Pages.query.filter_by(route=route).first()
                        if page:
                            page.html = html
                        else:
                            page = Pages(route, html)
                            db.session.add(page)
                        db.session.commit()

                elif item == 'containers':
                    saved = json.loads(data)
                    for entry in saved['results']:
                        name = entry['name']
                        buildfile = entry['buildfile']
                        container = Containers.query.filter_by(
                            name=name).first()
                        if container:
                            container.buildfile = buildfile
                        else:
                            container = Containers(name, buildfile)
                            db.session.add(container)
                        db.session.commit()

    for segment in segments:
        group = groups[segment]
        for item in group:
            table = side_db[item]
            path = "db/{}.json".format(item)
            data = backup.open(path).read()
            if data:
                saved = json.loads(data)
                for entry in saved['results']:
                    entry_id = entry.pop('id', None)  # drop the exported id; the target DB assigns its own
                    # This is a hack to get SQlite to properly accept datetime values from dataset
                    # See Issue #246
                    if get_config('SQLALCHEMY_DATABASE_URI').startswith('sqlite'):
                        for k, v in entry.items():
                            if isinstance(v, six.string_types):
                                try:
                                    entry[k] = datetime.datetime.strptime(
                                        v, '%Y-%m-%dT%H:%M:%S')
                                except ValueError:
                                    pass  # not a timestamp; keep the string as-is
                    table.insert(entry)
            else:
                continue

    # Extracting files
    files = [f for f in backup.namelist() if f.startswith('uploads/')]
    upload_folder = app.config.get('UPLOAD_FOLDER')
    for f in files:
        # Zip member names always use forward slashes, regardless of platform.
        filename = f.split('/', 1)

        if len(filename) < 2:  # just an empty uploads directory (e.g. uploads/)
            continue

        filename = filename[1]  # the path below uploads/ (the actual filename)
        full_path = os.path.join(upload_folder, filename)
        dirname = os.path.dirname(full_path)

        # Create any parent directories for the file
        if not os.path.exists(dirname):
            os.makedirs(dirname)

        source = backup.open(f)
        target = open(full_path, "wb")  # the Python 2 file() builtin is gone in Python 3
        with source, target:
            shutil.copyfileobj(source, target)
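
The SQLite workaround above converts only the string values that parse as %Y-%m-%dT%H:%M:%S timestamps and silently leaves everything else untouched. The same coercion in isolation, with made-up row data:

import datetime

import six

# A fake exported row: one timestamp string among ordinary values.
entry = {'team': 'alpha', 'date': '2017-03-01T12:30:00', 'value': 100}

for k, v in entry.items():
    if isinstance(v, six.string_types):
        try:
            # Same format string as import_ctf() uses for Issue #246.
            entry[k] = datetime.datetime.strptime(v, '%Y-%m-%dT%H:%M:%S')
        except ValueError:
            pass  # 'team' stays a plain string

print(entry['date'])  # 2017-03-01 12:30:00 (now a datetime, not a string)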
Example #4
import json
import zipfile

import dataset  # third-party; provides the side_db table wrapper

# db, get_config, set_config, Pages, and Containers come from the
# surrounding CTFd application.
def import_ctf(backup, segments=None, erase=False):
    side_db = dataset.connect(get_config('SQLALCHEMY_DATABASE_URI'))
    if segments is None:
        segments = ['challenges', 'teams', 'both', 'metadata']

    if not zipfile.is_zipfile(backup):
        raise TypeError("backup must be a zip file")

    backup = zipfile.ZipFile(backup)

    groups = {
        'challenges': [
            'challenges',
            'files',
            'tags',
            'keys',
            'hints',
        ],
        'teams': [
            'teams',
            'tracking',
            'awards',
        ],
        'both': [
            'solves',
            'wrong_keys',
            'unlocks',
        ],
        'metadata': [
            'alembic_version',
            'config',
            'pages',
            'containers',
        ]
    }

    # Need special handling of metadata
    if 'metadata' in segments:
        meta = groups['metadata']
        segments.remove('metadata')
        meta.remove('alembic_version')

        for item in meta:
            table = side_db[item]
            path = "db/{}.json".format(item)
            data = backup.open(path).read()

            # Some JSON files will be empty
            if data:
                if item == 'config':
                    saved = json.loads(data)
                    for entry in saved['results']:
                        key = entry['key']
                        value = entry['value']
                        set_config(key, value)

                elif item == 'pages':
                    saved = json.loads(data)
                    for entry in saved['results']:
                        route = entry['route']
                        html = entry['html']
                        page = Pages.query.filter_by(route=route).first()
                        if page:
                            page.html = html
                        else:
                            page = Pages(route, html)
                            db.session.add(page)
                        db.session.commit()

                elif item == 'containers':
                    saved = json.loads(data)
                    for entry in saved['results']:
                        name = entry['name']
                        buildfile = entry['buildfile']
                        container = Containers.query.filter_by(
                            name=name).first()
                        if container:
                            container.buildfile = buildfile
                        else:
                            container = Containers(name, buildfile)
                            db.session.add(container)
                        db.session.commit()

    for segment in segments:
        group = groups[segment]
        for item in group:
            table = side_db[item]
            path = "db/{}.json".format(item)
            data = backup.open(path).read()
            if data:
                saved = json.loads(data)
                for entry in saved['results']:
                    entry_id = entry.pop('id', None)  # drop the exported id; the target DB assigns its own
                    table.insert(entry)
            else:
                continue
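
Both variants share the same signature, so a partial restore only needs a subset of the segment names. A hypothetical invocation; 'export.zip' stands in for a real CTFd export archive:

# Restore only the challenge tables; the zip must contain the db/<table>.json
# entries that the groups mapping expects.
with open('export.zip', 'rb') as backup:
    import_ctf(backup, segments=['challenges'])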