def tag_and_push_image(
    image_params: Union[BuildProdParams, BuildCiParams], dry_run: bool, verbose: bool
) -> Tuple[int, str]:
    """
    Tag the image and push it to the registry.

    :param image_params: parameters of the image
    :param dry_run: whether we are in dry-run mode
    :param verbose: whether we produce verbose output
    :return: tuple of (return code, status description)
    """
    console.print(
        f"[blue]Tagging and pushing the {image_params.airflow_image_name} as "
        f"{image_params.airflow_image_name_with_tag}.[/]"
    )
    status = f"Tag and pushing the image {image_params.python}"
    tag_result = run_command(
        construct_docker_tag_command(image_params),
        verbose=verbose,
        dry_run=dry_run,
        cwd=AIRFLOW_SOURCES_ROOT,
        text=True,
        check=False,
    )
    if tag_result.returncode != 0:
        # Tagging failed - no point attempting login or push.
        return tag_result.returncode, status
    return_code, info = login_to_docker_registry(image_params, dry_run=dry_run)
    if return_code != 0:
        return return_code, f"{status}: {info}"
    push_result = run_command(
        construct_docker_push_command(image_params),
        verbose=verbose,
        dry_run=dry_run,
        cwd=AIRFLOW_SOURCES_ROOT,
        text=True,
        check=False,
    )
    return push_result.returncode, status
Exemple #2
0
def run_docker_compose_tests(image_name: str, dry_run: bool, verbose: bool,
                             extra_pytest_args: Tuple) -> Tuple[int, str]:
    """
    Run the docker-compose quick-start tests against the given PROD image.

    :param image_name: name of the image to test (must exist locally)
    :param dry_run: whether we are in dry-run mode
    :param verbose: whether we produce verbose output
    :param extra_pytest_args: additional arguments forwarded to pytest
    :return: tuple of (return code, status description)
    """
    result_message = f"Testing docker-compose python with {image_name}"
    # Verify the image exists before attempting to test it.
    inspect_result = run_command(["docker", "inspect", image_name],
                                 dry_run=dry_run,
                                 verbose=verbose,
                                 check=False,
                                 stdout=DEVNULL)
    if inspect_result.returncode != 0:
        console.print(
            f"[red]Error when inspecting PROD image: {inspect_result.returncode}[/]"
        )
        return inspect_result.returncode, result_message
    test_path = AIRFLOW_SOURCES_ROOT / "docker_tests" / "test_docker_compose_quick_start.py"
    test_env = os.environ.copy()
    test_env['DOCKER_IMAGE'] = image_name
    pytest_result = run_command(
        [
            sys.executable, "-m", "pytest", str(test_path),
            "-n", "auto", "--color=yes",
            *extra_pytest_args,
        ],
        dry_run=dry_run,
        verbose=verbose,
        env=test_env,
        check=False,
    )
    return pytest_result.returncode, result_message
def tag_and_push_image(image_params: Union[BuildProdParams, BuildCiParams],
                       dry_run: bool, verbose: bool):
    """
    Tag the image and push it to the registry.

    :param image_params: parameters of the image
    :param dry_run: whether we are in dry-run mode
    :param verbose: whether we produce verbose output
    """
    console.print(
        f"[blue]Tagging and pushing the {image_params.airflow_image_name} as "
        f"{image_params.airflow_image_name_with_tag}.[/]")
    # Tag first (check=True: any failure raises immediately).
    tag_cmd = construct_docker_tag_command(image_params)
    run_command(
        tag_cmd, verbose=verbose, dry_run=dry_run, cwd=AIRFLOW_SOURCES_ROOT, text=True, check=True
    )
    # Log in before pushing, then push with the same strict error handling.
    login_to_docker_registry(image_params)
    push_cmd = construct_docker_push_command(image_params)
    run_command(
        push_cmd, verbose=verbose, dry_run=dry_run, cwd=AIRFLOW_SOURCES_ROOT, text=True, check=True
    )
Exemple #4
0
def fix_ownership(verbose: bool, dry_run: bool):
    """
    Fix ownership of files created by the container by running the in-container fix script.

    :param verbose: print commands when running
    :param dry_run: do not execute "write" commands - just print what would happen
    """
    shell_params = ShellParams(
        verbose=verbose,
        mount_sources=MOUNT_ALL,
        python=DEFAULT_PYTHON_MAJOR_MINOR_VERSION,
    )
    extra_docker_flags = get_extra_docker_flags(MOUNT_ALL)
    env = construct_env_variables_docker_compose_command(shell_params)
    fix_ownership_cmd = [
        "docker",
        "run",
        "-t",
        *extra_docker_flags,
        "-e",
        "GITHUB_ACTIONS=",
        "-e",
        "SKIP_ENVIRONMENT_INITIALIZATION=true",
        "--pull",
        "never",
        shell_params.airflow_image_name_with_tag,
        "/opt/airflow/scripts/in_container/run_fix_ownership.sh",
    ]
    run_command(
        fix_ownership_cmd, verbose=verbose, dry_run=dry_run, text=True, env=env, check=False
    )
    # The result of the ownership fix is intentionally ignored - always exit successfully.
    sys.exit(0)
Exemple #5
0
def build_image(verbose: bool, dry_run: bool, **kwargs) -> None:
    """
    Builds CI image:

      * fixes group permissions for files (to improve caching when umask is 002)
      * converts all the parameters received via kwargs into BuildCIParams (including cache)
      * prints info about the image to build
      * logs in to docker registry on CI if build cache is being executed
      * removes "tag" for previously built image so that inline cache uses only remote image
      * constructs docker-compose command to run based on parameters passed
      * runs the build command
      * updates cached information that the build completed and saves checksums of all files
        for quick future check if the build is needed

    :param verbose: print commands when running
    :param dry_run: do not execute "write" commands - just print what would happen
    :param kwargs: arguments passed from the command
    """
    fix_group_permissions()
    parameters_passed = filter_out_none(**kwargs)
    ci_image_params = get_ci_image_build_params(parameters_passed)
    ci_image_params.print_info()
    # Remove the local tag so the inline cache only references the remote image.
    # check=False: it is fine if the image does not exist locally yet.
    run_command(
        [
            "docker", "rmi", "--no-prune", "--force",
            ci_image_params.airflow_image_name
        ],
        verbose=verbose,
        dry_run=dry_run,
        cwd=AIRFLOW_SOURCES_ROOT,
        text=True,
        check=False,
    )
    cmd = construct_build_docker_command(
        image_params=ci_image_params,
        verbose=verbose,
        required_args=REQUIRED_CI_IMAGE_ARGS,
        optional_args=OPTIONAL_CI_IMAGE_ARGS,
        production_image=False,
    )
    # Registry login is only needed when we are going to push the buildx cache.
    if ci_image_params.prepare_buildx_cache:
        login_to_docker_registry(ci_image_params)
    console.print(
        f"\n[blue]Building CI Image for Python {ci_image_params.python}\n")
    run_command(cmd,
                verbose=verbose,
                dry_run=dry_run,
                cwd=AIRFLOW_SOURCES_ROOT,
                text=True)
    if not dry_run:
        # Record that the build succeeded and store checksums used to decide
        # whether a rebuild is needed next time.
        ci_image_cache_dir = BUILD_CACHE_DIR / ci_image_params.airflow_branch
        ci_image_cache_dir.mkdir(parents=True, exist_ok=True)
        touch_cache_file(f"built_{ci_image_params.python}",
                         root_dir=ci_image_cache_dir)
        calculate_md5_checksum_for_files(ci_image_params.md5sum_cache_dir,
                                         update=True)
    else:
        console.print(
            "[blue]Not updating build cache because we are in `dry_run` mode.[/]"
        )
Exemple #6
0
def static_check(
    verbose: bool,
    dry_run: bool,
    all_files: bool,
    show_diff_on_failure: bool,
    last_commit: bool,
    type: Tuple[str],
    files: bool,
    precommit_args: Tuple,
):
    """
    Run pre-commit static checks assembled from the passed options.

    Does nothing when pre-commit is not installed.
    """
    if not check_pre_commit_installed(verbose=verbose):
        return
    # Selected checks (if any) are passed as positional arguments to `pre-commit run`.
    pre_commit_cmd = ['pre-commit', 'run', *type]
    if all_files:
        pre_commit_cmd.append("--all-files")
    if show_diff_on_failure:
        pre_commit_cmd.append("--show-diff-on-failure")
    if last_commit:
        pre_commit_cmd.extend(["--from-ref", "HEAD^", "--to-ref", "HEAD"])
    if files:
        pre_commit_cmd.append("--files")
    if verbose:
        pre_commit_cmd.append("--verbose")
    if precommit_args:
        pre_commit_cmd.extend(precommit_args)
    run_command(
        pre_commit_cmd,
        verbose=verbose,
        dry_run=dry_run,
        check=False,
        no_output_dump_on_exception=True,
        text=True,
    )
Exemple #7
0
def build_docs(verbose: bool, dry_run: bool, docs_only: bool,
               spellcheck_only: bool, package_filter: Tuple[str]):
    """
    Builds documentation in the container.

    * figures out CI image name
    * checks if there are enough resources
    * converts parameters into a DocParams class
    """
    params = BuildCiParams()
    ci_image_name = params.airflow_image_name
    check_docker_resources(verbose, ci_image_name)
    doc_builder = DocParams(
        package_filter=package_filter,
        docs_only=docs_only,
        spellcheck_only=spellcheck_only,
    )
    docs_cmd = [
        "docker",
        "run",
        *get_extra_docker_flags(MOUNT_SELECTED),
        "-t",
        "-e",
        "GITHUB_ACTIONS=",
        "--entrypoint",
        "/usr/local/bin/dumb-init",
        "--pull",
        "never",
        ci_image_name,
        "--",
        "/opt/airflow/scripts/in_container/run_docs_build.sh",
        *doc_builder.args_doc_builder,
    ]
    run_command(docs_cmd, verbose=verbose, dry_run=dry_run, text=True)
Exemple #8
0
 def check_buildx_plugin_build_command(self):
     """
     Return the docker sub-command (list of args) to use for building.

     Prefers `buildx build`. When preparing the buildx cache, uses (and lazily
     creates) a dedicated `airflow_cache` builder; otherwise the default builder.
     Falls back to plain `build` when the buildx plugin is unavailable, and exits
     if buildx cache preparation was requested without the plugin.
     """
     build_command_param = []
     is_buildx_available = check_if_buildx_plugin_available(True)
     if is_buildx_available:
         if self.prepare_buildx_cache:
             build_command_param.extend([
                 "buildx", "build", "--builder", "airflow_cache",
                 "--progress=tty"
             ])
             # Create the "airflow_cache" builder if it does not exist yet.
             # NOTE(review): run_command is called without check=False here while
             # its returncode is inspected - assumes run_command does not raise
             # on failure by default; confirm against run_command's signature.
             cmd = ['docker', 'buildx', 'inspect', 'airflow_cache']
             output = run_command(cmd, verbose=True, text=True)
             if output.returncode != 0:
                 next_cmd = [
                     'docker', 'buildx', 'create', '--name', 'airflow_cache'
                 ]
                 run_command(next_cmd, verbose=True, text=True)
         else:
             build_command_param.extend([
                 "buildx", "build", "--builder", "default", "--progress=tty"
             ])
     else:
         if self.prepare_buildx_cache:
             # Cache preparation strictly requires buildx - abort with guidance.
             console.print(
                 '\n[red] Buildx cli plugin is not available and you need it to prepare buildx cache. \n'
             )
             console.print(
                 '[red] Please install it following https://docs.docker.com/buildx/working-with-buildx/ \n'
             )
             sys.exit()
         build_command_param.append("build")
     return build_command_param
Exemple #9
0
def cleanup(verbose: bool, dry_run: bool):
    """
    Remove Airflow-labelled docker images, prune docker cache and delete the build cache dir.

    Asks for confirmation before doing anything.

    :param verbose: print commands when running
    :param dry_run: do not execute "write" commands - just print what would happen
    """
    console.print("\n[bright_yellow]Removing cache of parameters, images, and cleans up docker cache[/]")
    if click.confirm("Are you sure?"):
        docker_images_command_to_execute = [
            'docker',
            'images',
            '--filter',
            'label=org.apache.airflow.image',
            '--format',
            '{{.Repository}}:{{.Tag}}',
        ]
        process = run_command(
            docker_images_command_to_execute, verbose=verbose, text=True, capture_output=True
        )
        images = process.stdout.splitlines() if process and process.stdout else []
        if images:
            console.print("[light_blue]Removing images:[/]")
            for image in images:
                console.print(f"[light_blue] * {image}[/]")
            console.print()
            docker_rmi_command_to_execute = [
                'docker',
                'rmi',
                '--force',
            ]
            docker_rmi_command_to_execute.extend(images)
            run_command(docker_rmi_command_to_execute, verbose=verbose, dry_run=dry_run, check=False)
        else:
            # Fixed typo in user-facing message ("remote" -> "remove").
            console.print("[light_blue]No images to remove[/]\n")
        system_prune_command_to_execute = ['docker', 'system', 'prune']
        console.print("Pruning docker images")
        run_command(system_prune_command_to_execute, verbose=verbose, dry_run=dry_run, check=False)
        # Fixed stray "$" (bash-style interpolation leftover) - the f-string already
        # substitutes BUILD_CACHE_DIR, so "${...}" printed a literal "$".
        console.print(f"Removing build cache dir {BUILD_CACHE_DIR}")
        if not dry_run:
            shutil.rmtree(BUILD_CACHE_DIR, ignore_errors=True)
Exemple #10
0
def stop(verbose: bool, dry_run: bool, preserve_volumes: bool):
    """
    Bring down the docker-compose environment, optionally removing its volumes.

    :param verbose: print commands when running
    :param dry_run: do not execute "write" commands - just print what would happen
    :param preserve_volumes: keep the docker volumes instead of removing them
    """
    down_cmd = ['docker-compose', 'down', "--remove-orphans"]
    if not preserve_volumes:
        down_cmd.append("--volumes")
    env_variables = construct_env_variables_docker_compose_command(ShellParams({}))
    run_command(down_cmd, verbose=verbose, dry_run=dry_run, env=env_variables)
def login_to_docker_registry(build_params: ProdParams):
    """
    Log in to GitHub Container Registry (ghcr.io) when running in CI.

    Login is skipped (with an explanatory message) when no token is available or
    when AIRFLOW_LOGIN_TO_GITHUB_REGISTRY is not set to "true".

    :param build_params: parameters providing the ci flag, token and username
    """
    if build_params.ci == "true":
        if len(build_params.github_token) == 0:
            console.print(
                "\n[blue]Skip logging in to GitHub Registry. No Token available!"
            )
        elif build_params.airflow_login_to_github_registry != "true":
            console.print("\n[blue]Skip logging in to GitHub Registry.\
                    AIRFLOW_LOGIN_TO_GITHUB_REGISTRY is set as false")
        else:
            # Token is present and login is enabled: log out first to drop any
            # stale credentials, then log in passing the token via stdin so it
            # never appears on the command line.
            # (Removed the original trailing `else` with a "token is missing"
            # message - it was unreachable: a non-empty token always reaches
            # this branch, since len() cannot be negative.)
            run_command(['docker', 'logout', 'ghcr.io'],
                        verbose=True,
                        text=True)
            run_command(
                [
                    'docker',
                    'login',
                    '--username',
                    build_params.github_username,
                    '--password-stdin',
                    'ghcr.io',
                ],
                verbose=True,
                text=True,
                input=build_params.github_token,
            )
Exemple #12
0
def static_check(
    verbose: bool,
    all_files: bool,
    show_diff_on_failure: bool,
    last_commit: bool,
    type: Tuple[str],
    files: bool,
    precommit_args: Tuple,
):
    """
    Run pre-commit checks built from the provided options.

    Does nothing when the pre_commit package is not installed.
    """
    if not check_package_installed('pre_commit'):
        return
    # Selected checks (if any) become positional arguments to `pre-commit run`.
    pre_commit_cmd = ['pre-commit', 'run', *type]
    if all_files:
        pre_commit_cmd.append("--all-files")
    if show_diff_on_failure:
        pre_commit_cmd.append("--show-diff-on-failure")
    if last_commit:
        pre_commit_cmd.extend(["--from-ref", "HEAD^", "--to-ref", "HEAD"])
    if files:
        pre_commit_cmd.append("--files")
    if precommit_args:
        pre_commit_cmd.extend(precommit_args)
    run_command(
        pre_commit_cmd,
        suppress_raise_exception=True,
        suppress_console_print=True,
        text=True,
    )
def check_docker_resources(
    verbose: bool, mount_all_flag: bool, airflow_sources: str, airflow_ci_image_name: str
):
    """Run the in-container resource check script in the CI image."""
    resource_check_cmd = [
        "docker",
        "run",
        "-t",
        *get_extra_docker_flags(mount_all_flag, airflow_sources),
        "--entrypoint",
        "/bin/bash",
        airflow_ci_image_name,
        "-c",
        "python /opt/airflow/scripts/in_container/run_resource_check.py",
    ]
    run_command(resource_check_cmd, verbose=verbose, text=True)
Exemple #14
0
def setup_autocomplete(verbose: bool, dry_run: bool, force: bool):
    """
    Enables autocompletion of Breeze2 commands.

    :param verbose: print commands when running
    :param dry_run: do not execute "write" commands - just print what would happen
    :param force: overwrite an already-installed autocomplete setup
    """

    # Determine if the shell is bash/zsh/powershell. It helps to build the autocomplete path
    detected_shell = os.environ.get('SHELL')
    detected_shell = None if detected_shell is None else detected_shell.split(os.sep)[-1]
    if detected_shell not in ['bash', 'zsh', 'fish']:
        console.print(f"\n[red] The shell {detected_shell} is not supported for autocomplete![/]\n")
        sys.exit(1)
    console.print(f"Installing {detected_shell} completion for local user")
    autocomplete_path = (
        AIRFLOW_SOURCES_ROOT / "dev" / "breeze" / "autocomplete" / f"{NAME}-complete-{detected_shell}.sh"
    )
    console.print(f"[bright_blue]Activation command script is available here: {autocomplete_path}[/]\n")
    console.print(
        f"[bright_yellow]We need to add above script to your {detected_shell} profile and "
        "install 'click' package in your default python installation destination.[/]\n"
    )
    if click.confirm("Should we proceed ?"):
        run_command(['pip', 'install', '--upgrade', 'click'], verbose=True, dry_run=dry_run, check=False)
        if detected_shell == 'bash':
            script_path = str(Path('~').expanduser() / '.bash_completion')
            command_to_execute = f"source {autocomplete_path}"
            write_to_shell(command_to_execute, dry_run, script_path, force)
        elif detected_shell == 'zsh':
            script_path = str(Path('~').expanduser() / '.zshrc')
            command_to_execute = f"source {autocomplete_path}"
            write_to_shell(command_to_execute, dry_run, script_path, force)
        elif detected_shell == 'fish':
            # Include steps for fish shell
            script_path = str(Path('~').expanduser() / f'.config/fish/completions/{NAME}.fish')
            if os.path.exists(script_path) and not force:
                # Fixed broken rich markup: "--force/]" -> "--force[/]".
                console.print(
                    "\n[bright_yellow]Autocompletion is already setup. Skipping. "
                    "You can force autocomplete installation by adding --force[/]\n"
                )
            else:
                with open(autocomplete_path) as source_file, open(script_path, 'w') as destination_file:
                    for line in source_file:
                        destination_file.write(line)
        else:
            # NOTE(review): this powershell branch is unreachable - any shell other
            # than bash/zsh/fish already exited via sys.exit(1) above. Kept as-is;
            # confirm whether powershell was meant to be added to the allowed list.
            subprocess.check_call(['powershell', 'Set-ExecutionPolicy Unrestricted -Scope CurrentUser'])
            script_path = (
                subprocess.check_output(['powershell', '-NoProfile', 'echo $profile']).decode("utf-8").strip()
            )
            command_to_execute = f". {autocomplete_path}"
            write_to_shell(command_to_execute, dry_run, script_path, force)
    else:
        console.print(
            "\nPlease follow the https://click.palletsprojects.com/en/8.1.x/shell-completion/ "
            "to setup autocompletion for breeze manually if you want to use it.\n"
        )
Exemple #15
0
def build_production_image(verbose: bool, dry_run: bool, **kwargs):
    """
    Builds PROD image:

      * fixes group permissions for files (to improve caching when umask is 002)
      * converts all the parameters received via kwargs into BuildProdParams (including cache)
      * prints info about the image to build
      * removes docker-context-files if requested
      * performs sanity check if the files are present in docker-context-files if expected
      * logs in to docker registry on CI if build cache is being executed
      * removes "tag" for previously built image so that inline cache uses only remote image
      * constructs docker-compose command to run based on parameters passed
      * runs the build command
      * updates cached information that the build completed and saves checksums of all files
        for quick future check if the build is needed

    :param verbose: print commands when running
    :param dry_run: do not execute "write" commands - just print what would happen
    :param kwargs: arguments passed from the command
    """
    fix_group_permissions()
    parameters_passed = filter_out_none(**kwargs)
    prod_image_params = get_prod_image_build_params(parameters_passed)
    prod_image_params.print_info()
    if prod_image_params.cleanup_docker_context_files:
        clean_docker_context_files()
    check_docker_context_files(prod_image_params.install_docker_context_files)
    # Registry login is only needed when we are going to push the buildx cache.
    if prod_image_params.prepare_buildx_cache:
        login_to_docker_registry(prod_image_params)
    # Remove the local tag so the inline cache only references the remote image.
    # check=False: it is fine if the image does not exist locally yet.
    run_command(
        [
            "docker", "rmi", "--no-prune", "--force",
            prod_image_params.airflow_image_name
        ],
        verbose=verbose,
        dry_run=dry_run,
        cwd=AIRFLOW_SOURCES_ROOT,
        text=True,
        check=False,
    )
    console.print(
        f"\n[blue]Building PROD Image for Python {prod_image_params.python}\n")
    cmd = construct_build_docker_command(
        image_params=prod_image_params,
        verbose=verbose,
        required_args=REQUIRED_PROD_IMAGE_ARGS,
        optional_args=OPTIONAL_PROD_IMAGE_ARGS,
        production_image=True,
    )
    run_command(cmd,
                verbose=verbose,
                dry_run=dry_run,
                cwd=AIRFLOW_SOURCES_ROOT,
                text=True)
Exemple #16
0
def run_shell_with_build_image_checks(verbose: bool, dry_run: bool,
                                      shell_params: ShellParams):
    """
    Executes shell command built from params passed, checking if build is not needed.
    * checks if there are enough resources to run shell
    * checks if image was built at least once (if not - forces the build)
    * if not forced, checks if build is needed and asks the user if so
    * builds the image if needed
    * prints information about the build
    * constructs docker compose command to enter shell
    * executes it

    :param verbose: print commands when running
    :param dry_run: do not execute "write" commands - just print what would happen
    :param shell_params: parameters of the execution
    """
    check_docker_resources(verbose, shell_params.airflow_image_name)
    # Marker file created after a successful build for this branch/python version.
    build_ci_image_check_cache = Path(BUILD_CACHE_DIR,
                                      shell_params.airflow_branch,
                                      f".built_{shell_params.python}")
    if build_ci_image_check_cache.exists():
        console.print(
            f'[bright_blue]{shell_params.the_image_type} image already built locally.[/]'
        )
    else:
        # Never-built image: force the build regardless of what the user asked for.
        console.print(
            f'[bright_yellow]{shell_params.the_image_type} image not built locally. '
            f'Forcing build.[/]')
        shell_params.force_build = True

    if not shell_params.force_build:
        # May ask the user interactively whether to rebuild.
        build_image_if_needed_steps(verbose, dry_run, shell_params)
    else:
        build_image(
            verbose,
            dry_run=dry_run,
            python=shell_params.python,
            upgrade_to_newer_dependencies="false",
        )
    shell_params.print_badge_info()
    cmd = [
        'docker-compose', 'run', '--service-ports', "-e", "BREEZE", '--rm',
        'airflow'
    ]
    cmd_added = shell_params.command_passed
    env_variables = construct_env_variables_docker_compose_command(
        shell_params)
    if cmd_added is not None:
        # Forward the user-supplied command into the container shell.
        cmd.extend(['-c', cmd_added])
    run_command(cmd,
                verbose=verbose,
                dry_run=dry_run,
                env=env_variables,
                text=True)
def check_docker_resources(verbose: bool, airflow_image_name: str):
    """
    Check if we have enough resources to run docker. This is done via running script embedded in our image.

    :param verbose: print commands when running
    :param airflow_image_name: name of the airflow image to use.
    """
    resource_check_cmd = [
        "docker",
        "run",
        "-t",
        *get_extra_docker_flags(MOUNT_NONE),
        "--entrypoint",
        "/bin/bash",
        airflow_image_name,
        "-c",
        "python /opt/airflow/scripts/in_container/run_resource_check.py",
    ]
    run_command(resource_check_cmd, verbose=verbose, text=True)
Exemple #18
0
def check_docker_version(verbose: bool):
    """
    Checks if the docker compose version is as expected (including some specific modifications done by
    some vendors such as Microsoft (they might have modified version of docker-compose/docker in their
    cloud. In case docker compose version is wrong we continue but print warning for the user.

    :param verbose: print commands when running
    """
    permission_denied = check_docker_permission(verbose)
    if not permission_denied:
        docker_version_command = [
            'docker', 'version', '--format', '{{.Client.Version}}'
        ]
        docker_version = ''
        docker_version_output = run_command(
            docker_version_command,
            verbose=verbose,
            no_output_dump_on_exception=True,
            capture_output=True,
            text=True,
        )
        if docker_version_output.returncode == 0:
            docker_version = docker_version_output.stdout.strip()
        if docker_version == '':
            # Implicit string concatenation instead of a backslash continuation:
            # the old form embedded the source indentation into the printed message.
            console.print(
                'Your version of docker is unknown. If the scripts fail, please make sure to '
                f'install docker at least: {MIN_DOCKER_VERSION} version.'
            )
        else:
            good_version = compare_version(docker_version, MIN_DOCKER_VERSION)
            if good_version:
                console.print(f'Good version of Docker: {docker_version}.')
            else:
                # Also fixed the missing space after "too old:" in the message.
                console.print(
                    f'Your version of docker is too old: {docker_version}. '
                    f'Please upgrade to at least {MIN_DOCKER_VERSION}'
                )
Exemple #19
0
def build_image_checks(verbose: bool, shell_params: ShellBuilder):
    """
    Check whether the CI image needs building, build it if required, then enter the shell.

    :param verbose: print commands when running
    :param shell_params: parameters of the shell execution
    """
    fix_group_permissions()
    # Marker file created after a successful build for this branch/python version.
    build_ci_image_check_cache = Path(BUILD_CACHE_DIR,
                                      shell_params.airflow_branch,
                                      f".built_{shell_params.python_version}")
    if build_ci_image_check_cache.exists():
        console.print(
            f'{shell_params.the_image_type} image already built locally.')
    else:
        console.print(f'{shell_params.the_image_type} image not built locally')

    if not shell_params.force_build:
        # May ask the user interactively whether to rebuild.
        build_image_if_needed_steps(verbose, shell_params)
    else:
        build_image(
            verbose,
            python_version=shell_params.python_version,
            upgrade_to_newer_dependencies="false",
        )

    instruct_for_setup()
    check_docker_resources(verbose, str(shell_params.airflow_sources),
                           shell_params.airflow_ci_image_name)
    cmd = ['docker-compose', 'run', '--service-ports', '--rm', 'airflow']
    cmd_added = shell_params.command_passed
    env_variables = construct_env_variables_docker_compose_command(
        shell_params)
    if cmd_added is not None:
        # Forward the user-supplied command into the container shell.
        cmd.extend(['-c', cmd_added])
    if verbose:
        shell_params.print_badge_info()
    output = run_command(cmd, verbose=verbose, env=env_variables, text=True)
    if verbose:
        console.print(f"[blue]{output}[/]")
def check_docker_permission_denied(verbose) -> bool:
    """
    Checks if we have permission to write to docker socket. By default, on Linux you need to add your user
    to docker group and some new users do not realize that. We help those users if we have
    permission to run docker commands.

    :param verbose: print commands when running
    :return: True if permission is denied.
    """
    permission_denied = False
    docker_permission_command = ["docker", "info"]
    command_result = run_command(
        docker_permission_command,
        verbose=verbose,
        no_output_dump_on_exception=True,
        capture_output=True,
        text=True,
        check=False,
    )
    if command_result.returncode != 0:
        permission_denied = True
        if command_result.stdout and 'Got permission denied while trying to connect' in command_result.stdout:
            console.print('ERROR: You have `permission denied` error when trying to communicate with docker.')
            # Implicit concatenation avoids embedding source indentation in the
            # message, and the docs URL no longer contains a stray space
            # ("docs.docker.com/ engine/..." was a broken link).
            console.print(
                'Most likely you need to add your user to `docker` group: '
                'https://docs.docker.com/engine/install/linux-postinstall/ .'
            )
    return permission_denied
def check_docker_resources(
    verbose: bool, airflow_image_name: str, dry_run: bool
) -> Union[subprocess.CompletedProcess, subprocess.CalledProcessError]:
    """
    Check if we have enough resources to run docker. This is done via running script embedded in our image.

    :param verbose: print commands when running
    :param dry_run: whether to run it in dry run mode
    :param airflow_image_name: name of the airflow image to use.
    """
    resource_check_cmd = ["docker", "run", "-t"]
    resource_check_cmd.extend(["--entrypoint", "/bin/bash"])
    # PYTHONDONTWRITEBYTECODE avoids creating .pyc files inside the container.
    resource_check_cmd.extend(["-e", "PYTHONDONTWRITEBYTECODE=true"])
    resource_check_cmd.append(airflow_image_name)
    resource_check_cmd.extend(
        ["-c", "python /opt/airflow/scripts/in_container/run_resource_check.py"]
    )
    return run_command(
        cmd=resource_check_cmd,
        verbose=verbose,
        dry_run=dry_run,
        text=True,
    )
Exemple #22
0
 def airflow_version_for_production_image(self):
     """Return AIRFLOW_VERSION as reported from inside the production image."""
     version_cmd = [
         'docker', 'run', '--entrypoint', '/bin/bash',
         f'{self.airflow_image_name}',
         '-c', 'echo "${AIRFLOW_VERSION}"',
     ]
     completed = run_command(version_cmd, capture_output=True, text=True)
     return completed.stdout.strip()
Exemple #23
0
def get_host_group_id():
    """
    Return the current user's primary group id (as a string) on Linux or Darwin.

    :return: group id from `id -gr`, or an empty string on other operating systems.
    """
    host_group_id = ''
    # Renamed the local from "os" - it shadowed the stdlib os module.
    host_os = get_host_os()
    if host_os == 'Linux' or host_os == 'Darwin':
        host_group_id = run_command(cmd=['id', '-gr'],
                                    capture_output=True,
                                    text=True).stdout.strip()
    return host_group_id
Exemple #24
0
def build_image(verbose, **kwargs):
    """Build the CI image using BuildParams derived from the passed kwargs."""
    ci_image_params = BuildParams(**filter_out_none(**kwargs))
    # Reuse a cached python version if one was recorded previously.
    is_cached, value = check_cache_and_write_if_not_cached(
        "PYTHON_MAJOR_MINOR_VERSION", ci_image_params.python_version
    )
    if is_cached:
        ci_image_params.python_version = value
    build_cmd = construct_docker_command(ci_image_params)
    output = run_command(build_cmd, verbose=verbose, text=True)
    console.print(f"[blue]{output}")
Exemple #25
0
def login_to_docker_registry(image_params: Union[BuildProdParams,
                                                 BuildCiParams],
                             dry_run: bool) -> Tuple[int, str]:
    """
    In case of CI environment, we need to login to GitHub Registry if we want to prepare cache.
    This method logs in using the params specified.

    :param image_params: parameters to use for Building prod image
    :param dry_run: whether we are in dry_run mode
    :return: tuple of (return code, status description)
    """
    if os.environ.get("CI"):
        if len(image_params.github_token) == 0:
            console.print(
                "\n[bright_blue]Skip logging in to GitHub Registry. No Token available!"
            )
        elif image_params.login_to_github_registry != "true":
            console.print("\n[bright_blue]Skip logging in to GitHub Registry.\
                    LOGIN_TO_GITHUB_REGISTRY is set as false")
        elif len(image_params.github_token) > 0:
            # Log out first to drop stale credentials; check=False because a
            # failed logout (e.g. never logged in) is harmless.
            run_command(['docker', 'logout', 'ghcr.io'],
                        verbose=True,
                        text=False,
                        check=False)
            # Token is passed via stdin so it never appears on the command line.
            command_result = run_command(
                [
                    'docker',
                    'login',
                    '--username',
                    image_params.github_username,
                    '--password-stdin',
                    'ghcr.io',
                ],
                verbose=True,
                text=True,
                input=image_params.github_token,
                check=False,
            )
            return command_result.returncode, "Docker login"
        else:
            # NOTE(review): this branch is unreachable - a token length is either
            # 0 (first branch) or > 0 (previous branch). Consider removing.
            console.print(
                '\n[bright_blue]Skip Login to GitHub Container Registry as token is missing'
            )
    return 0, "Docker login skipped"
Exemple #26
0
def build(
    verbose: bool,
    mount_all_flag: bool,
    airflow_sources: str,
    airflow_ci_image_name: str,
    doc_builder: DocBuilder,
):
    """Run the documentation build script inside the CI image."""
    docs_cmd = [
        "docker",
        "run",
        *get_extra_docker_flags(mount_all_flag, airflow_sources),
        "-t",
        "-e",
        "GITHUB_ACTIONS=",
        "--entrypoint",
        "/usr/local/bin/dumb-init",
        "--pull",
        "never",
        airflow_ci_image_name,
        "--",
        "/opt/airflow/scripts/in_container/run_docs_build.sh",
        *doc_builder.args_doc_builder,
    ]
    run_command(docs_cmd, verbose=verbose, text=True)
Exemple #27
0
def build_image(verbose, **kwargs):
    """
    Build the CI image and record the successful build in the local cache.

    :param verbose: print the commands being executed
    :param kwargs: image build parameters; ``None`` values are filtered out
    """
    parameters_passed = filter_out_none(**kwargs)
    ci_image_params = get_image_build_params(parameters_passed)
    cmd = construct_docker_command(ci_image_params)
    # run_command appears to raise on failure by default (callers pass
    # check=False where failure is tolerated) — TODO confirm; everything
    # below therefore only runs after a successful build.
    output = run_command(cmd, verbose=verbose, cwd=AIRFLOW_SOURCE, text=True)
    console.print(f"[blue]{output}")
    # Touch the "built" marker only AFTER the build succeeded. Touching it up
    # front (as before) poisoned the cache when the docker build failed.
    ci_image_cache_dir = Path(BUILD_CACHE_DIR, ci_image_params.airflow_branch)
    ci_image_cache_dir.mkdir(parents=True, exist_ok=True)
    touch_cache_file(
        f"built_{ci_image_params.python_version}",
        root_dir=ci_image_cache_dir,
    )
def build_production_image(verbose, **kwargs):
    """
    Build the PROD image (or skip building when requested) and optionally
    push it to prepare the buildx cache.

    :param verbose: print the commands being executed
    :param kwargs: image build parameters; ``None`` values are filtered out
    """
    parameters_passed = filter_out_none(**kwargs)
    prod_params = get_image_build_params(parameters_passed)
    prod_params.print_info()
    if prod_params.cleanup_docker_context_files:
        clean_docker_context_files()
    check_docker_context_files(prod_params.install_docker_context_files)
    if prod_params.skip_building_prod_image:
        console.print(
            '[bright_yellow]\nSkip building production image. Assume the one we have is good!'
        )
        console.print(
            # Fixed broken rich markup: the opening '[' of the tag was missing,
            # which printed the literal text "bright_yellow]".
            '[bright_yellow]\nYou must run Breeze2 build-prod-image before for all python versions!'
        )
    if prod_params.prepare_buildx_cache:
        login_to_docker_registry(prod_params)

    cmd = construct_docker_command(prod_params)
    # Remove any stale local image first; failures are ignored on purpose
    # (the image may simply not exist yet).
    run_command(
        [
            "docker", "rmi", "--no-prune", "--force",
            prod_params.airflow_prod_image_name
        ],
        verbose=verbose,
        cwd=AIRFLOW_SOURCE,
        text=True,
        suppress_raise_exception=True,
    )
    run_command(cmd, verbose=verbose, cwd=AIRFLOW_SOURCE, text=True)
    if prod_params.prepare_buildx_cache:
        run_command(['docker', 'push', prod_params.airflow_prod_image_name],
                    verbose=True,
                    text=True)
Exemple #29
0
def static_checks(
    verbose: bool,
    dry_run: bool,
    github_repository: str,
    all_files: bool,
    show_diff_on_failure: bool,
    last_commit: bool,
    commit_ref: str,
    type: Tuple[str],
    files: bool,
    precommit_args: Tuple,
):
    """
    Run pre-commit static checks with the requested options and exit the
    process with pre-commit's own return code.
    """
    assert_pre_commit_installed(verbose=verbose)
    # --last-commit and --commit-ref both select a revision range; refuse both.
    if last_commit and commit_ref:
        console.print(
            "\n[red]You cannot specify both --last-commit and --commit-ref[/]\n"
        )
        sys.exit(1)
    cmd = [sys.executable, "-m", "pre_commit", 'run', *type]
    if all_files:
        cmd += ["--all-files"]
    if show_diff_on_failure:
        cmd += ["--show-diff-on-failure"]
    if last_commit:
        cmd += ["--from-ref", "HEAD^", "--to-ref", "HEAD"]
    if commit_ref:
        cmd += ["--from-ref", f"{commit_ref}^", "--to-ref", f"{commit_ref}"]
    if files:
        cmd += ["--files"]
    if verbose or dry_run:
        cmd += ["--verbose"]
    if precommit_args:
        cmd += list(precommit_args)
    # Pre-commit hooks read GITHUB_REPOSITORY from the environment.
    env = {**os.environ, 'GITHUB_REPOSITORY': github_repository}
    result = run_command(
        cmd,
        verbose=verbose,
        dry_run=dry_run,
        check=False,
        no_output_dump_on_exception=True,
        text=True,
        env=env,
    )
    if result.returncode != 0:
        console.print(
            "[red]There were errors during pre-commit check. They should be fixed[/]"
        )
    sys.exit(result.returncode)
Exemple #30
0
def login_to_docker_registry(image_params: Any):
    """
    In case of CI environment, we need to login to GitHub Registry if we want to prepare cache.
    This method logs in using the params specified.

    :param image_params: parameters to use for Building prod image
    """
    # Only attempt a login on CI; locally the developer's own docker
    # credentials are left untouched.
    if os.environ.get("CI"):
        if len(image_params.github_token) == 0:
            console.print(
                "\n[bright_blue]Skip logging in to GitHub Registry. No Token available!"
            )
        elif image_params.airflow_login_to_github_registry != "true":
            console.print("\n[bright_blue]Skip logging in to GitHub Registry.\
                    AIRFLOW_LOGIN_TO_GITHUB_REGISTRY is set as false")
        elif len(image_params.github_token) > 0:
            # Log out first — presumably to drop stale ghcr.io credentials
            # before re-authenticating; TODO confirm the intent.
            run_command(['docker', 'logout', 'ghcr.io'],
                        verbose=True,
                        text=True)
            # The token is supplied on stdin via --password-stdin so it never
            # appears on the command line or in process listings.
            # NOTE(review): unlike other call sites, no check=False is passed
            # here — a failed login may raise, depending on run_command's
            # default; verify that this is intended.
            run_command(
                [
                    'docker',
                    'login',
                    '--username',
                    image_params.github_username,
                    '--password-stdin',
                    'ghcr.io',
                ],
                verbose=True,
                text=True,
                input=image_params.github_token,
            )
        else:
            # NOTE(review): unreachable — the token length is either 0 (first
            # branch) or > 0 (previous branch), so this else can never run.
            console.print(
                '\n[bright_blue]Skip Login to GitHub Container Registry as token is missing'
            )