Example no. 1
0
            background_task_ctx,
        ]
    for cleanup_ctx in cleanup_contexts:
        if shutdown_cb := getattr(cleanup_ctx, 'shutdown', None):
            app.on_shutdown.append(shutdown_cb)
    app.cleanup_ctx.extend(cleanup_contexts)
    aiojobs.aiohttp.setup(app, **app['scheduler_opts'])
    cors = aiohttp_cors.setup(app, defaults=app['cors_opts'])
    # should be done in create_app() in other modules.
    cors.add(app.router.add_route('GET', r'', hello))
    cors.add(app.router.add_route('GET', r'/', hello))
    if subapp_pkgs is None:
        subapp_pkgs = []
    for pkg_name in subapp_pkgs:
        if pidx == 0:
            log.info('Loading module: {0}', pkg_name[1:])
        subapp_mod = importlib.import_module(pkg_name, 'ai.backend.gateway')
        init_subapp(pkg_name, app, getattr(subapp_mod, 'create_app'))
    return app


@aiotools.actxmgr
async def server_main(loop: asyncio.AbstractEventLoop, pidx: int,
                      _args: List[Any]) -> AsyncIterator[None]:
    subapp_pkgs = [
        '.etcd',
        '.events',
        '.auth',
        '.ratelimit',
        '.vfolder',
        '.admin',
Example no. 2
0
import logging
import os
from pathlib import Path
from typing import Dict

from ai.backend.common.logging import BraceStyleAdapter

log = BraceStyleAdapter(logging.getLogger(__name__))

# The names of the following AWS variables follow the boto3 convention.
# Each falls back to a non-functional dummy/placeholder value when unset,
# so the module stays importable without AWS credentials.
s3_access_key = os.environ.get('AWS_ACCESS_KEY_ID', 'dummy-access-key')
s3_secret_key = os.environ.get('AWS_SECRET_ACCESS_KEY', 'dummy-secret-key')
s3_region = os.environ.get('AWS_REGION', 'ap-northeast-1')
s3_bucket = os.environ.get('AWS_S3_BUCKET', 'codeonweb')
s3_bucket_path = os.environ.get('AWS_S3_BUCKET_PATH', 'bucket')

# Without a real access key the upload feature cannot work; announce it
# once at import time so operators are not surprised by missing uploads.
if s3_access_key == 'dummy-access-key':
    log.info('Automatic ~/.output file S3 uploads are disabled.')


def relpath(path, base):
    """Return *path* expressed relative to *base*, resolving both first."""
    resolved_target = Path(path).resolve()
    resolved_anchor = Path(base).resolve()
    return resolved_target.relative_to(resolved_anchor)


def scandir(root: Path, allowed_max_size: int) -> Dict[Path, float]:
    '''
    Scans a directory recursively and returns a dictionary of all files and
    their last modified time.

    NOTE(review): ``allowed_max_size`` is not used in the visible portion of
    this function — presumably a per-file or total size cap; confirm against
    the full implementation.
    '''
    # Accumulates file path -> last-modified timestamp for the result.
    file_stats: Dict[Path, float] = dict()
    # Accept plain string paths as well as Path objects.
    if not isinstance(root, Path):
        root = Path(root)
    if not root.exists():
Example no. 3
0
    current_version = int(
        Path(
            pkg_resources.resource_filename(
                f'ai.backend.krunner.{distro_name}',
                f'./krunner-version.{distro}.txt')).read_text().strip())
    volume_name = f'backendai-krunner.v{current_version}.{distro}'
    extractor_image = 'backendai-krunner-extractor:latest'

    try:
        for item in (await docker.images.list()):
            if item['RepoTags'] is None:
                continue
            if item['RepoTags'][0] == extractor_image:
                break
        else:
            log.info('preparing the Docker image for krunner extractor...')
            extractor_archive = pkg_resources.resource_filename(
                'ai.backend.runner', 'krunner-extractor.img.tar.xz')
            with lzma.open(extractor_archive, 'rb') as reader:
                proc = await asyncio.create_subprocess_exec(
                    *['docker', 'load'], stdin=reader)
                if (await proc.wait() != 0):
                    raise RuntimeError(
                        'loading krunner extractor image has failed!')

        log.info('checking krunner-env for {}...', distro)
        do_create = False
        try:
            vol = DockerVolume(docker, volume_name)
            await vol.show()
        except DockerError as e: