Example #1
 def convert(self, value, param, ctx):
     param_name = param.envvar if param.envvar else param.name.upper()
     if isinstance(value, CacheableDefault):
         is_cached, new_value = read_and_validate_value_from_cache(
             param_name, value.value)
         if not is_cached:
             console.print(f"\n[bright_blue]Default value of {param.name} "
                           f"parameter {new_value} used.[/]\n")
     else:
         allowed, allowed_values = check_if_values_allowed(
             param_name, value)
         if allowed:
             new_value = value
             write_to_cache_file(param_name,
                                 new_value,
                                 check_allowed_values=False)
         else:
             new_value = allowed_values[0]
             console.print(
                 f"\n[yellow]The value {value} is not allowed for parameter {param.name}. "
                 f"Setting default value to {new_value}")
             write_to_cache_file(param_name,
                                 new_value,
                                 check_allowed_values=False)
     return super().convert(new_value, param, ctx)
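For context, a minimal sketch of what the `read_and_validate_value_from_cache` helper used above might look like, assuming the same cache layout used by `write_to_cache_file` in Example #28 (one `.{param_name}` file under `BUILD_CACHE_DIR`) and the `check_if_values_allowed` helper; the names and layout are assumptions, not the actual Breeze implementation:

from pathlib import Path
from typing import Any, Tuple

def read_and_validate_value_from_cache(param_name: str, default_value: Any) -> Tuple[bool, Any]:
    # Hypothetical sketch: returns (is_cached, value). Falls back to the default
    # when there is no cache file or when the cached value is no longer allowed.
    # BUILD_CACHE_DIR and check_if_values_allowed come from the Breeze utilities shown elsewhere.
    cache_path = Path(BUILD_CACHE_DIR, f".{param_name}")
    if not cache_path.exists():
        return False, default_value
    cached_value = cache_path.read_text().strip()
    allowed, _allowed_values = check_if_values_allowed(param_name, cached_value)
    if not allowed:
        return False, default_value
    return True, cached_value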
Example #2
def instruct_build_image(python: str):
    """Print instructions to the user that they should build the image"""
    console.print(f'[bright_yellow]\nThe CI image for '
                  f'python version {python} may be outdated[/]\n')
    console.print(
        f"\n[yellow]Please run at the earliest convenience:[/]\n\nbreeze build-image --python {python}\n\n"
    )
Example #3
def build_image(verbose: bool, dry_run: bool, **kwargs) -> None:
    """
    Builds CI image:

      * fixes group permissions for files (to improve caching when umask is 002)
      * converts all the parameters received via kwargs into BuildCIParams (including cache)
      * prints info about the image to build
      * logs in to the docker registry on CI if build cache is being prepared
      * removes "tag" for the previously built image so that the inline cache uses only the remote image
      * constructs the docker build command to run based on the parameters passed
      * runs the build command
      * updates the cached information that the build completed and saves checksums of all files
        for a quick future check whether the build is needed

    :param verbose: print commands when running
    :param dry_run: do not execute "write" commands - just print what would happen
    :param kwargs: arguments passed from the command
    """
    fix_group_permissions()
    parameters_passed = filter_out_none(**kwargs)
    ci_image_params = get_ci_image_build_params(parameters_passed)
    ci_image_params.print_info()
    run_command(
        [
            "docker", "rmi", "--no-prune", "--force",
            ci_image_params.airflow_image_name
        ],
        verbose=verbose,
        dry_run=dry_run,
        cwd=AIRFLOW_SOURCES_ROOT,
        text=True,
        check=False,
    )
    cmd = construct_build_docker_command(
        image_params=ci_image_params,
        verbose=verbose,
        required_args=REQUIRED_CI_IMAGE_ARGS,
        optional_args=OPTIONAL_CI_IMAGE_ARGS,
        production_image=False,
    )
    if ci_image_params.prepare_buildx_cache:
        login_to_docker_registry(ci_image_params)
    console.print(
        f"\n[blue]Building CI Image for Python {ci_image_params.python}\n")
    run_command(cmd,
                verbose=verbose,
                dry_run=dry_run,
                cwd=AIRFLOW_SOURCES_ROOT,
                text=True)
    if not dry_run:
        ci_image_cache_dir = BUILD_CACHE_DIR / ci_image_params.airflow_branch
        ci_image_cache_dir.mkdir(parents=True, exist_ok=True)
        touch_cache_file(f"built_{ci_image_params.python}",
                         root_dir=ci_image_cache_dir)
        calculate_md5_checksum_for_files(ci_image_params.md5sum_cache_dir,
                                         update=True)
    else:
        console.print(
            "[blue]Not updating build cache because we are in `dry_run` mode.[/]"
        )
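A hedged usage sketch: the remaining build options are forwarded as keyword arguments and turned into BuildCIParams, so a minimal dry-run invocation (mirroring the call in Example #13; any parameters beyond python are illustrative) might look like:

# Dry-run build for a single Python version; extra kwargs are filtered and become BuildCIParams.
build_image(
    verbose=True,
    dry_run=True,
    python="3.9",
    upgrade_to_newer_dependencies="false",
)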
Example #4
def check_docker_permission_denied(verbose) -> bool:
    """
    Checks if we have permission to write to the docker socket. By default, on Linux you need to add your user
    to the docker group, and some new users do not realize that. We help those users by pointing this out
    when we do not have permission to run docker commands.

    :param verbose: print commands when running
    :return: True if permission is denied.
    """
    permission_denied = False
    docker_permission_command = ["docker", "info"]
    command_result = run_command(
        docker_permission_command,
        verbose=verbose,
        no_output_dump_on_exception=True,
        capture_output=True,
        text=True,
        check=False,
    )
    if command_result.returncode != 0:
        permission_denied = True
        if command_result.stdout and 'Got permission denied while trying to connect' in command_result.stdout:
            console.print('ERROR: You have `permission denied` error when trying to communicate with docker.')
            console.print(
                'Most likely you need to add your user to the `docker` group: '
                'https://docs.docker.com/engine/install/linux-postinstall/ .'
            )
    return permission_denied
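A small usage sketch showing how a caller might gate on this check before issuing further docker commands; the early exit is an assumption for illustration, not part of the original flow:

import sys

# Abort early if the current user cannot talk to the docker daemon.
if check_docker_permission_denied(verbose=True):
    sys.exit(1)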
Example #5
def tag_and_push_image(image_params: Union[BuildProdParams, BuildCiParams],
                       dry_run: bool, verbose: bool):
    """
    Tag and push the image according to parameters.
    :param image_params: parameters of the image
    :param dry_run: whether we are in dry-run mode
    :param verbose: whether we produce verbose output
    :return:
    """
    console.print(
        f"[blue]Tagging and pushing the {image_params.airflow_image_name} as "
        f"{image_params.airflow_image_name_with_tag}.[/]")
    cmd = construct_docker_tag_command(image_params)
    run_command(cmd,
                verbose=verbose,
                dry_run=dry_run,
                cwd=AIRFLOW_SOURCES_ROOT,
                text=True,
                check=True)
    login_to_docker_registry(image_params)
    cmd = construct_docker_push_command(image_params)
    run_command(cmd,
                verbose=verbose,
                dry_run=dry_run,
                cwd=AIRFLOW_SOURCES_ROOT,
                text=True,
                check=True)
Example #6
def tag_and_push_image(
    image_params: Union[BuildProdParams, BuildCiParams], dry_run: bool, verbose: bool
) -> Tuple[int, str]:
    """
    Tag and push the image according to parameters.
    :param image_params: parameters of the image
    :param dry_run: whether we are in dry-run mode
    :param verbose: whether we produce verbose output
    :return:
    """
    console.print(
        f"[blue]Tagging and pushing the {image_params.airflow_image_name} as "
        f"{image_params.airflow_image_name_with_tag}.[/]"
    )
    cmd = construct_docker_tag_command(image_params)
    command_result = run_command(
        cmd, verbose=verbose, dry_run=dry_run, cwd=AIRFLOW_SOURCES_ROOT, text=True, check=False
    )
    if command_result.returncode == 0:
        return_code, info = login_to_docker_registry(image_params, dry_run=dry_run)
        if return_code != 0:
            return return_code, f"Tag and pushing the image {image_params.python}: {info}"
        cmd = construct_docker_push_command(image_params)
        command_result = run_command(
            cmd, verbose=verbose, dry_run=dry_run, cwd=AIRFLOW_SOURCES_ROOT, text=True, check=False
        )
        if command_result.returncode == 0:
            return 0, f"Tag and pushing the image {image_params.python}"
    return command_result.returncode, f"Tag and pushing the image {image_params.python}"
Example #7
def run_docker_compose_tests(image_name: str, dry_run: bool, verbose: bool,
                             extra_pytest_args: Tuple) -> Tuple[int, str]:
    command_result = run_command(["docker", "inspect", image_name],
                                 dry_run=dry_run,
                                 verbose=verbose,
                                 check=False,
                                 stdout=DEVNULL)
    if command_result.returncode != 0:
        console.print(
            f"[red]Error when inspecting PROD image: {command_result.returncode}[/]"
        )
        return command_result.returncode, f"Testing docker-compose python with {image_name}"
    pytest_args = ("-n", "auto", "--color=yes")
    test_path = AIRFLOW_SOURCES_ROOT / "docker_tests" / "test_docker_compose_quick_start.py"
    env = os.environ.copy()
    env['DOCKER_IMAGE'] = image_name
    command_result = run_command(
        [
            sys.executable, "-m", "pytest",
            str(test_path), *pytest_args, *extra_pytest_args
        ],
        dry_run=dry_run,
        verbose=verbose,
        env=env,
        check=False,
    )
    return command_result.returncode, f"Testing docker-compose python with {image_name}"
Example #8
def find_airflow_sources_root() -> Path:
    """
    Find the root of airflow sources. When Breeze is run from sources, it is easy, but it also
    has to handle the case when Breeze is installed via `pipx`, so it searches upwards from the current
    directory to find the right root of the airflow directory.

    If not found, the current directory is returned (this handles the case when Breeze is run from the local
    directory).

    :return: Path for the found sources.

    """
    default_airflow_sources_root = Path.cwd()
    # Try to find airflow sources in current working dir
    airflow_sources_root = search_upwards_for_airflow_sources_root(Path.cwd())
    if not airflow_sources_root:
        # Or if it fails, find it in parents of the directory where the ./breeze.py is.
        airflow_sources_root = search_upwards_for_airflow_sources_root(Path(__file__).resolve().parent)
    if airflow_sources_root:
        os.chdir(airflow_sources_root)
        return Path(airflow_sources_root)
    else:
        console.print(
            f"\n[bright_yellow]Could not find Airflow sources location. "
            f"Assuming {default_airflow_sources_root}"
        )
    os.chdir(default_airflow_sources_root)
    return Path(default_airflow_sources_root)
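The helper `search_upwards_for_airflow_sources_root` is not shown in this listing; a minimal sketch, assuming the Airflow root is recognised by a marker file such as `airflow/__init__.py` (the marker is an assumption):

from pathlib import Path
from typing import Optional

def search_upwards_for_airflow_sources_root(start_from: Path) -> Optional[Path]:
    # Hypothetical sketch: walk up the directory tree looking for the assumed marker file.
    for candidate in [start_from, *start_from.parents]:
        if (candidate / "airflow" / "__init__.py").exists():
            return candidate
    return None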
Example #9
 def run_build(ci_image_params: BuildCiParams) -> None:
     return_code, info = build_ci_image(verbose=verbose,
                                        dry_run=dry_run,
                                        with_ci_group=with_ci_group,
                                        ci_image_params=ci_image_params)
     if return_code != 0:
         console.print(f"[red]Error when building image! {info}")
         sys.exit(return_code)
Example #10
def build_ci_image(
    verbose: bool,
    dry_run: bool,
    additional_extras: Optional[str],
    python: str,
    image_tag: Optional[str],
    additional_dev_apt_deps: Optional[str],
    additional_runtime_apt_deps: Optional[str],
    additional_python_deps: Optional[str],
    additional_dev_apt_command: Optional[str],
    additional_runtime_apt_command: Optional[str],
    additional_dev_apt_env: Optional[str],
    additional_runtime_apt_env: Optional[str],
    dev_apt_command: Optional[str],
    dev_apt_deps: Optional[str],
    install_providers_from_sources: bool,
    runtime_apt_command: Optional[str],
    runtime_apt_deps: Optional[str],
    github_repository: Optional[str],
    docker_cache: Optional[str],
    platform: Optional[str],
    debian_version: Optional[str],
    prepare_buildx_cache: bool,
    answer: Optional[str],
    upgrade_to_newer_dependencies: str = "false",
):
    """Build CI image."""
    set_forced_answer(answer)
    if verbose:
        console.print(
            f"\n[bright_blue]Building image of airflow from {AIRFLOW_SOURCES_ROOT} "
            f"python version: {python}[/]\n")
    build_image(
        verbose=verbose,
        dry_run=dry_run,
        additional_extras=additional_extras,
        python=python,
        image_tag=image_tag,
        additional_dev_apt_deps=additional_dev_apt_deps,
        additional_runtime_apt_deps=additional_runtime_apt_deps,
        additional_python_deps=additional_python_deps,
        additional_runtime_apt_command=additional_runtime_apt_command,
        additional_dev_apt_command=additional_dev_apt_command,
        additional_dev_apt_env=additional_dev_apt_env,
        additional_runtime_apt_env=additional_runtime_apt_env,
        install_providers_from_sources=install_providers_from_sources,
        dev_apt_command=dev_apt_command,
        dev_apt_deps=dev_apt_deps,
        runtime_apt_command=runtime_apt_command,
        runtime_apt_deps=runtime_apt_deps,
        github_repository=github_repository,
        docker_cache=docker_cache,
        platform=platform,
        debian_version=debian_version,
        prepare_buildx_cache=prepare_buildx_cache,
        upgrade_to_newer_dependencies=upgrade_to_newer_dependencies,
    )
Example #11
def warn_different_location(installation_airflow_sources: Path,
                            current_airflow_sources: Path):
    console.print(
        f"\n[bright_yellow]WARNING! Breeze was installed from "
        f"different location![/]\n\n"
        f"Breeze installed from   : {installation_airflow_sources}\n"
        f"Current Airflow sources : {current_airflow_sources}\n\n"
        f"[bright_yellow]This might cause various problems!![/]\n\n"
        f"If you experience problems - reinstall Breeze with:\n\n"
        f"    {NAME} self-upgrade --force --use-current-airflow-sources\n\n")
Example #12
def build_production_image(verbose: bool, dry_run: bool, **kwargs):
    """
    Builds PROD image:

      * fixes group permissions for files (to improve caching when umask is 002)
      * converts all the parameters received via kwargs into BuildProdParams (including cache)
      * prints info about the image to build
      * removes docker-context-files if requested
      * performs a sanity check that the expected files are present in docker-context-files
      * logs in to the docker registry on CI if build cache is being prepared
      * removes "tag" for the previously built image so that the inline cache uses only the remote image
      * constructs the docker build command to run based on the parameters passed
      * runs the build command
      * updates the cached information that the build completed and saves checksums of all files
        for a quick future check whether the build is needed

    :param verbose: print commands when running
    :param dry_run: do not execute "write" commands - just print what would happen
    :param kwargs: arguments passed from the command
    """
    fix_group_permissions()
    parameters_passed = filter_out_none(**kwargs)
    prod_image_params = get_prod_image_build_params(parameters_passed)
    prod_image_params.print_info()
    if prod_image_params.cleanup_docker_context_files:
        clean_docker_context_files()
    check_docker_context_files(prod_image_params.install_docker_context_files)
    if prod_image_params.prepare_buildx_cache:
        login_to_docker_registry(prod_image_params)
    run_command(
        [
            "docker", "rmi", "--no-prune", "--force",
            prod_image_params.airflow_image_name
        ],
        verbose=verbose,
        dry_run=dry_run,
        cwd=AIRFLOW_SOURCES_ROOT,
        text=True,
        check=False,
    )
    console.print(
        f"\n[blue]Building PROD Image for Python {prod_image_params.python}\n")
    cmd = construct_build_docker_command(
        image_params=prod_image_params,
        verbose=verbose,
        required_args=REQUIRED_PROD_IMAGE_ARGS,
        optional_args=OPTIONAL_PROD_IMAGE_ARGS,
        production_image=True,
    )
    run_command(cmd,
                verbose=verbose,
                dry_run=dry_run,
                cwd=AIRFLOW_SOURCES_ROOT,
                text=True)
Example #13
def run_shell_with_build_image_checks(verbose: bool, dry_run: bool,
                                      shell_params: ShellParams):
    """
    Executes the shell command built from the params passed, checking first whether an image build is needed.
    * checks if there are enough resources to run shell
    * checks if the image was built at least once (if not - forces the build)
    * if not forced, checks if a build is needed and asks the user if so
    * builds the image if needed
    * prints information about the build
    * constructs docker compose command to enter shell
    * executes it

    :param verbose: print commands when running
    :param dry_run: do not execute "write" commands - just print what would happen
    :param shell_params: parameters of the execution
    """
    check_docker_resources(verbose, shell_params.airflow_image_name)
    build_ci_image_check_cache = Path(BUILD_CACHE_DIR,
                                      shell_params.airflow_branch,
                                      f".built_{shell_params.python}")
    if build_ci_image_check_cache.exists():
        console.print(
            f'[bright_blue]{shell_params.the_image_type} image already built locally.[/]'
        )
    else:
        console.print(
            f'[bright_yellow]{shell_params.the_image_type} image not built locally. '
            f'Forcing build.[/]')
        shell_params.force_build = True

    if not shell_params.force_build:
        build_image_if_needed_steps(verbose, dry_run, shell_params)
    else:
        build_image(
            verbose,
            dry_run=dry_run,
            python=shell_params.python,
            upgrade_to_newer_dependencies="false",
        )
    shell_params.print_badge_info()
    cmd = [
        'docker-compose', 'run', '--service-ports', "-e", "BREEZE", '--rm',
        'airflow'
    ]
    cmd_added = shell_params.command_passed
    env_variables = construct_env_variables_docker_compose_command(
        shell_params)
    if cmd_added is not None:
        cmd.extend(['-c', cmd_added])
    run_command(cmd,
                verbose=verbose,
                dry_run=dry_run,
                env=env_variables,
                text=True)
Example #14
def static_checks(
    verbose: bool,
    dry_run: bool,
    github_repository: str,
    all_files: bool,
    show_diff_on_failure: bool,
    last_commit: bool,
    commit_ref: str,
    type: Tuple[str],
    files: bool,
    precommit_args: Tuple,
):
    assert_pre_commit_installed(verbose=verbose)
    command_to_execute = [sys.executable, "-m", "pre_commit", 'run']
    if last_commit and commit_ref:
        console.print(
            "\n[red]You cannot specify both --last-commit and --commit-ref[/]\n"
        )
        sys.exit(1)
    for single_check in type:
        command_to_execute.append(single_check)
    if all_files:
        command_to_execute.append("--all-files")
    if show_diff_on_failure:
        command_to_execute.append("--show-diff-on-failure")
    if last_commit:
        command_to_execute.extend(["--from-ref", "HEAD^", "--to-ref", "HEAD"])
    if commit_ref:
        command_to_execute.extend(
            ["--from-ref", f"{commit_ref}^", "--to-ref", f"{commit_ref}"])
    if files:
        command_to_execute.append("--files")
    if verbose or dry_run:
        command_to_execute.append("--verbose")
    if precommit_args:
        command_to_execute.extend(precommit_args)
    env = os.environ.copy()
    env['GITHUB_REPOSITORY'] = github_repository
    static_checks_result = run_command(
        command_to_execute,
        verbose=verbose,
        dry_run=dry_run,
        check=False,
        no_output_dump_on_exception=True,
        text=True,
        env=env,
    )
    if static_checks_result.returncode != 0:
        console.print(
            "[red]There were errors during pre-commit check. They should be fixed[/]"
        )
    sys.exit(static_checks_result.returncode)
Example #15
def pull_image(
    verbose: bool,
    dry_run: bool,
    python: str,
    github_repository: str,
    run_in_parallel: bool,
    python_versions: str,
    parallelism: int,
    image_tag: Optional[str],
    wait_for_image: bool,
    tag_as_latest: bool,
    verify_image: bool,
    extra_pytest_args: Tuple,
):
    """Pull and optionally verify CI images - possibly in parallel for all Python versions."""
    if run_in_parallel:
        python_version_list = get_python_version_list(python_versions)
        ci_image_params_list = [
            BuildCiParams(image_tag=image_tag,
                          python=python,
                          github_repository=github_repository)
            for python in python_version_list
        ]
        run_pull_in_parallel(
            dry_run=dry_run,
            parallelism=parallelism,
            image_params_list=ci_image_params_list,
            python_version_list=python_version_list,
            verbose=verbose,
            verify_image=verify_image,
            wait_for_image=wait_for_image,
            tag_as_latest=tag_as_latest,
            extra_pytest_args=extra_pytest_args
            if extra_pytest_args is not None else (),
        )
    else:
        image_params = BuildCiParams(image_tag=image_tag,
                                     python=python,
                                     github_repository=github_repository)
        return_code, info = run_pull_image(
            image_params=image_params,
            dry_run=dry_run,
            verbose=verbose,
            wait_for_image=wait_for_image,
            tag_as_latest=tag_as_latest,
            poll_time=10.0,
        )
        if return_code != 0:
            console.print(
                f"[red]There was an error when pulling CI image: {info}[/]")
            sys.exit(return_code)
Example #16
def build_ci_image_in_parallel(verbose: bool, dry_run: bool, parallelism: int,
                               python_version_list: List[str], **kwargs):
    """Run CI image builds in parallel."""
    console.print(
        f"\n[bright_blue]Running with parallelism = {parallelism} for the images: {python_version_list}:"
    )
    pool = mp.Pool(parallelism)
    results = [
        pool.apply_async(build_ci_image,
                         args=(verbose, dry_run, False),
                         kwds=kwargs)
    ]
    check_async_run_results(results)
    pool.close()
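A sketch of what `check_async_run_results` might do, assuming each worker returns a `(return_code, info)` tuple as in `print_async_summary` (Example #30) and that `console` is the rich console used throughout these examples; the exit behaviour is an assumption:

import sys
from multiprocessing.pool import ApplyResult
from typing import List

def check_async_run_results(results: List[ApplyResult]) -> None:
    # Hypothetical sketch: wait for every worker and fail hard if any of them failed.
    errors = False
    for result in results:
        return_code, info = result.get()
        if return_code != 0:
            errors = True
            console.print(f"[red]Error {return_code} when running: {info}[/]")
    if errors:
        sys.exit(1)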
Example #17
 def extra_docker_build_flags(self) -> List[str]:
     extra_build_flags = []
     if len(self.install_airflow_reference) > 0:
         AIRFLOW_INSTALLATION_METHOD = (
             "https://github.com/apache/airflow/archive/" +
             self.install_airflow_reference + ".tar.gz#egg=apache-airflow")
         extra_build_flags.extend([
             "--build-arg",
             f"AIRFLOW_INSTALLATION_METHOD={AIRFLOW_INSTALLATION_METHOD}",
         ])
         extra_build_flags.extend(self.args_for_remote_install)
         self.airflow_version = self.install_airflow_reference
     elif len(self.install_airflow_version) > 0:
         if not re.match(r'^[0-9\.]+((a|b|rc|alpha|beta|pre)[0-9]+)?$',
                         self.install_airflow_version):
             console.print(
                 f'\n[red]ERROR: Bad value for install-airflow-version:{self.install_airflow_version}'
             )
             console.print(
                 '[red]Only numerical versions allowed for PROD image here !'
             )
             sys.exit()
         extra_build_flags.extend(
             ["--build-arg", "AIRFLOW_INSTALLATION_METHOD=apache-airflow"])
         extra_build_flags.extend([
             "--build-arg",
             f"AIRFLOW_VERSION_SPECIFICATION==={self.install_airflow_version}"
         ])
         extra_build_flags.extend([
             "--build-arg",
             f"AIRFLOW_VERSION={self.install_airflow_version}"
         ])
         extra_build_flags.extend(self.args_for_remote_install)
         self.airflow_version = self.install_airflow_version
     else:
         extra_build_flags.extend([
             "--build-arg",
             f"AIRFLOW_SOURCES_FROM={AIRFLOW_SOURCES_FROM}",
             "--build-arg",
             f"AIRFLOW_SOURCES_TO={AIRFLOW_SOURCES_TO}",
             "--build-arg",
             f"AIRFLOW_SOURCES_WWW_FROM={AIRFLOW_SOURCES_WWW_FROM}",
             "--build-arg",
             f"AIRFLOW_SOURCES_WWW_TO={AIRFLOW_SOURCES_WWW_TO}",
             "--build-arg",
             f"AIRFLOW_INSTALLATION_METHOD={self.installation_method}",
             "--build-arg",
             f"AIRFLOW_CONSTRAINTS_REFERENCE={self.default_constraints_branch}",
         ])
     return extra_build_flags
Example #18
def md5sum_check_if_build_is_needed(md5sum_cache_dir: Path,
                                    the_image_type: str) -> bool:
    """
    Checks if build is needed based on whether important files were modified.

    :param md5sum_cache_dir: directory where cached md5 sums are stored
    :param the_image_type: type of the image to check (PROD/CI)
    :return: True if build is needed.
    """
    build_needed = False
    modified_files, not_modified_files = calculate_md5_checksum_for_files(
        md5sum_cache_dir, update=False)
    if len(modified_files) > 0:
        console.print(
            '[bright_yellow]The following files are modified since last time image was built: [/]\n\n'
        )
        for file in modified_files:
            console.print(f" * [bright_blue]{file}[/]")
        console.print(
            f'\n[bright_yellow]Likely {the_image_type} image needs rebuild[/]\n'
        )
        build_needed = True
    else:
        console.print(
            f'Docker image build is not needed for {the_image_type} build as no important files are changed!'
        )
    return build_needed
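A sketch of how `calculate_md5_checksum_for_files` might work, assuming the set of "important" files is known (the FILES_FOR_REBUILD_CHECK list here is hypothetical) and that one `.md5sum` file per tracked file is kept in the cache directory; all names beyond those shown in the listing are illustrative:

import hashlib
from pathlib import Path
from typing import List, Tuple

def calculate_md5_checksum_for_files(
    md5sum_cache_dir: Path, update: bool = False
) -> Tuple[List[str], List[str]]:
    # Hypothetical sketch: compare the current md5 of each tracked file with the cached one.
    # Simplified: uses the file name as the cache key, so same-named files would collide.
    modified_files, not_modified_files = [], []
    for tracked_file in FILES_FOR_REBUILD_CHECK:  # assumed list of paths relative to the sources root
        source_path = AIRFLOW_SOURCES_ROOT / tracked_file
        current_hash = hashlib.md5(source_path.read_bytes()).hexdigest()
        cache_path = md5sum_cache_dir / f"{source_path.name}.md5sum"
        cached_hash = cache_path.read_text().strip() if cache_path.exists() else ""
        if current_hash != cached_hash:
            modified_files.append(str(tracked_file))
            if update:
                cache_path.parent.mkdir(parents=True, exist_ok=True)
                cache_path.write_text(current_hash)
        else:
            not_modified_files.append(str(tracked_file))
    return modified_files, not_modified_files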
Example #19
def check_pre_commit_installed(verbose: bool) -> bool:
    """
    Check if pre-commit is installed in the right version.
    :param verbose: print commands when running
    :return: True if pre-commit is installed in the right version.
    """
    # Local import to make autocomplete work
    import yaml

    pre_commit_config = yaml.safe_load(
        (AIRFLOW_SOURCES_ROOT / ".pre-commit-config.yaml").read_text())
    min_pre_commit_version = pre_commit_config["minimum_pre_commit_version"]

    pre_commit_name = "pre-commit"
    is_installed = False
    if shutil.which(pre_commit_name) is not None:
        process = run_command([pre_commit_name, "--version"],
                              verbose=verbose,
                              check=True,
                              capture_output=True,
                              text=True)
        if process and process.stdout:
            pre_commit_version = process.stdout.split(" ")[-1].strip()
            if StrictVersion(pre_commit_version) >= StrictVersion(
                    min_pre_commit_version):
                console.print(
                    f"\n[green]Package {pre_commit_name} is installed. "
                    f"Good version {pre_commit_version} (>= {min_pre_commit_version})[/]\n"
                )
                is_installed = True
            else:
                console.print(
                    f"\n[red]Package name {pre_commit_name} version is wrong. It should be"
                    f"aat least {min_pre_commit_version} and is {pre_commit_version}.[/]\n\n"
                )
        else:
            console.print(
                "\n[bright_yellow]Could not determine version of pre-commit. "
                "You might need to update it![/]\n")
            is_installed = True
    else:
        console.print(
            f"\n[red]Error: Package name {pre_commit_name} is not installed.[/]"
        )
    if not is_installed:
        console.print(
            "\nPlease install using https://pre-commit.com/#install to continue\n"
        )
    return is_installed
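Note that `distutils.version.StrictVersion`, used above, is deprecated in newer Python versions; the same comparison can be written with `packaging.version`. A drop-in sketch, assuming the `packaging` package is available:

from packaging.version import Version

def is_pre_commit_version_sufficient(pre_commit_version: str, min_pre_commit_version: str) -> bool:
    # Same check as above, without relying on the deprecated StrictVersion.
    return Version(pre_commit_version) >= Version(min_pre_commit_version)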
Example #20
def fix_group_permissions():
    """Fixes permissions of all the files and directories that have group-write access."""
    console.print("[bright_blue]Fixing group permissions[/]")
    files_to_fix_result = run_command(['git', 'ls-files', './'], capture_output=True, text=True)
    if files_to_fix_result.returncode == 0:
        files_to_fix = files_to_fix_result.stdout.strip().split('\n')
        for file_to_fix in files_to_fix:
            change_file_permission(Path(file_to_fix))
    directories_to_fix_result = run_command(
        ['git', 'ls-tree', '-r', '-d', '--name-only', 'HEAD'], capture_output=True, text=True
    )
    if directories_to_fix_result.returncode == 0:
        directories_to_fix = directories_to_fix_result.stdout.strip().split('\n')
        for directory_to_fix in directories_to_fix:
            change_directory_permission(Path(directory_to_fix))
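The `change_file_permission` and `change_directory_permission` helpers are not shown in this listing; a minimal sketch of what they might do, assuming the goal is to drop group- and other-write bits while keeping directories traversable (the exact permission bits are an assumption):

import os
import stat
from pathlib import Path

def change_file_permission(file_to_fix: Path) -> None:
    # Hypothetical sketch: remove write permission for group and others.
    if file_to_fix.exists():
        current_mode = stat.S_IMODE(os.stat(file_to_fix).st_mode)
        os.chmod(file_to_fix, current_mode & ~stat.S_IWGRP & ~stat.S_IWOTH)

def change_directory_permission(directory_to_fix: Path) -> None:
    # Hypothetical sketch: same as above, but keep the directory executable (traversable) for the group.
    if directory_to_fix.exists():
        current_mode = stat.S_IMODE(os.stat(directory_to_fix).st_mode)
        os.chmod(directory_to_fix, (current_mode & ~stat.S_IWGRP & ~stat.S_IWOTH) | stat.S_IXGRP)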
Example #21
def reinstall_breeze(breeze_sources: Path):
    """
    Reinstalls Breeze from specified sources.
    :param breeze_sources: Sources where to install Breeze from.
    """
    # Note that we cannot use `pipx upgrade` here because we sometimes install
    # Breeze from different sources than originally installed (i.e. when we reinstall airflow
    # from the current directory).
    console.print(
        f"\n[bright_blue]Reinstalling Breeze from {breeze_sources}\n")
    subprocess.check_call(
        ["pipx", "install", "-e",
         str(breeze_sources), "--force"])
    console.print(
        f"\n[bright_blue]Breeze has been reinstalled from {breeze_sources}. Exiting now.[/]\n\n"
        f"[bright_yellow]Please run your command again[/]\n")
    sys.exit(0)
Example #22
def get_python_version_list(python_versions: str) -> List[str]:
    """
    Retrieve and validate the space-separated list of Python versions and return them as a list.
    :param python_versions: space separated list of Python versions
    :return: List of python versions
    """
    python_version_list = python_versions.split(" ")
    errors = False
    for python in python_version_list:
        if python not in ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS:
            console.print(f"[red]The Python version {python} passed in {python_versions} is wrong.[/]")
            errors = True
    if errors:
        console.print(
            f"\nSome of the Python versions passed are not in the "
            f"list: {ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS}. Quitting.\n"
        )
        sys.exit(1)
    return python_version_list
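A usage sketch; the concrete versions are illustrative and are only accepted if they appear in ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS:

# Splits the space-separated string and validates each entry against the allowed list.
python_version_list = get_python_version_list("3.7 3.8 3.9")
print(python_version_list)  # ['3.7', '3.8', '3.9'] if all three are allowed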
Example #23
def get_extra_docker_flags(mount_sources: str) -> List[str]:
    """
    Returns extra docker flags based on the type of mounting we want to do for sources.
    :param mount_sources: type of mounting we want to have
    :return: extra flags as a list of strings
    """
    extra_docker_flags = []
    if mount_sources == MOUNT_ALL:
        extra_docker_flags.extend(["-v", f"{AIRFLOW_SOURCES_ROOT}:/opt/airflow/:cached"])
    elif mount_sources == MOUNT_SELECTED:
        for flag in NECESSARY_HOST_VOLUMES:
            extra_docker_flags.extend(["-v", str(AIRFLOW_SOURCES_ROOT) + flag])
    else:  # none
        console.print('[bright_blue]Skip mounting host volumes to Docker[/]')
    extra_docker_flags.extend(["-v", f"{AIRFLOW_SOURCES_ROOT}/files:/files"])
    extra_docker_flags.extend(["-v", f"{AIRFLOW_SOURCES_ROOT}/dist:/dist"])
    extra_docker_flags.extend(["--rm"])
    extra_docker_flags.extend(["--env-file", f"{AIRFLOW_SOURCES_ROOT}/scripts/ci/docker-compose/_docker.env"])
    return extra_docker_flags
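A usage sketch showing how the returned flags could be spliced into a docker invocation; the surrounding command and image name are illustrative, only MOUNT_SELECTED and run_command come from the listing:

# Illustrative one-off container run using the flags for selected-volume mounting.
cmd = ["docker", "run", *get_extra_docker_flags(MOUNT_SELECTED), "-t", "my-airflow-image", "bash"]
run_command(cmd, verbose=True, check=True)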
Example #24
def prepare_build_command(prepare_buildx_cache: bool,
                          verbose: bool) -> List[str]:
    """
    Prepare build command for docker build. Depending on whether we have buildx plugin installed or not,
    and whether we run cache preparation, there might be different results:

    * if buildx plugin is installed - `docker buildx` command is returned - using regular or cache builder
      depending on whether we build regular image or cache
    * if no buildx plugin is installed, and we do not prepare cache, regular docker `build` command is used.
    * if no buildx plugin is installed, and we prepare cache - we fail. Cache can only be done with buildx
    :param prepare_buildx_cache: whether we are preparing buildx cache.
    :param verbose: print commands when running
    :return: command to use as docker build command
    """
    build_command_param = []
    is_buildx_available = check_if_buildx_plugin_installed(verbose=verbose)
    if is_buildx_available:
        if prepare_buildx_cache:
            build_command_param.extend([
                "buildx", "build", "--builder", "airflow_cache",
                "--progress=tty"
            ])
            cmd = ['docker', 'buildx', 'inspect', 'airflow_cache']
            process = run_command(cmd, verbose=True, text=True)
            if process and process.returncode != 0:
                next_cmd = [
                    'docker', 'buildx', 'create', '--name', 'airflow_cache'
                ]
                run_command(next_cmd, verbose=True, text=True, check=False)
        else:
            build_command_param.extend(
                ["buildx", "build", "--builder", "default", "--progress=tty"])
    else:
        if prepare_buildx_cache:
            console.print(
                '\n[red] Buildx cli plugin is not available and you need it to prepare buildx cache. \n'
            )
            console.print(
                '[red] Please install it following https://docs.docker.com/buildx/working-with-buildx/ \n'
            )
            sys.exit(1)
        build_command_param.append("build")
    return build_command_param
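A usage sketch showing how the returned fragment is meant to replace the plain "build" verb in the final docker command line; the tag and build context are illustrative:

# Illustrative: splice the "build" fragment into the full docker command line.
build_fragment = prepare_build_command(prepare_buildx_cache=False, verbose=True)
cmd = ["docker", *build_fragment, "--tag", "airflow-ci:latest", "."]
run_command(cmd, verbose=True, text=True, check=True)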
Example #25
def shell(
    verbose: bool,
    dry_run: bool,
    python: str,
    github_repository: str,
    backend: str,
    integration: Tuple[str],
    postgres_version: str,
    mysql_version: str,
    mssql_version: str,
    forward_credentials: bool,
    mount_sources: str,
    use_airflow_version: str,
    force_build: bool,
    db_reset: bool,
    answer: Optional[str],
    extra_args: Tuple,
):
    """Enter breeze.py environment. this is the default command use when no other is selected."""
    set_forced_answer(answer)
    if verbose or dry_run:
        console.print("\n[green]Welcome to breeze.py[/]\n")
        console.print(
            f"\n[green]Root of Airflow Sources = {AIRFLOW_SOURCES_ROOT}[/]\n")
    enter_shell(
        verbose=verbose,
        dry_run=dry_run,
        python=python,
        github_repository=github_repository,
        backend=backend,
        integration=integration,
        postgres_version=postgres_version,
        mysql_version=mysql_version,
        mssql_version=mssql_version,
        forward_credentials=str(forward_credentials),
        mount_sources=mount_sources,
        use_airflow_version=use_airflow_version,
        force_build=force_build,
        db_reset=db_reset,
        extra_args=extra_args,
        answer=answer,
    )
Example #26
def run_pull_in_parallel(
    dry_run: bool,
    parallelism: int,
    image_params_list: Union[List[BuildCiParams], List[BuildProdParams]],
    python_version_list: List[str],
    verbose: bool,
    verify_image: bool,
    tag_as_latest: bool,
    wait_for_image: bool,
    extra_pytest_args: Tuple,
):
    """Run image pull in parallel"""
    console.print(
        f"\n[bright_blue]Pulling with parallelism = {parallelism} for the images: {python_version_list}:"
    )
    pool = mp.Pool(parallelism)
    poll_time = 10.0
    if not verify_image:
        results = [
            pool.apply_async(run_pull_image,
                             args=(image_param, dry_run, verbose,
                                   wait_for_image, tag_as_latest, poll_time))
            for image_param in image_params_list
        ]
    else:
        results = [
            pool.apply_async(
                run_pull_and_verify_image,
                args=(
                    image_param,
                    dry_run,
                    verbose,
                    wait_for_image,
                    tag_as_latest,
                    poll_time,
                    extra_pytest_args,
                ),
            ) for image_param in image_params_list
        ]
    check_async_run_results(results)
    pool.close()
Example #27
def enter_shell(
    **kwargs
) -> Union[subprocess.CompletedProcess, subprocess.CalledProcessError]:
    """
    Executes entering shell using the parameters passed as kwargs:

    * checks if docker version is good
    * checks if docker-compose version is good
    * updates kwargs with cached parameters
    * displays ASCIIART and CHEATSHEET unless disabled
    * builds ShellParams from the updated kwargs
    * executes the command to drop the user into the Breeze shell

    """
    verbose = kwargs['verbose']
    dry_run = kwargs['dry_run']
    if not check_docker_is_running(verbose):
        console.print(
            '[red]Docker is not running.[/]\n'
            '[bright_yellow]Please make sure Docker is installed and running.[/]'
        )
        sys.exit(1)
    check_docker_version(verbose)
    check_docker_compose_version(verbose)
    updated_kwargs = synchronize_cached_params(kwargs)
    if read_from_cache_file('suppress_asciiart') is None:
        console.print(ASCIIART, style=ASCIIART_STYLE)
    if read_from_cache_file('suppress_cheatsheet') is None:
        console.print(CHEATSHEET, style=CHEATSHEET_STYLE)
    enter_shell_params = ShellParams(**filter_out_none(**updated_kwargs))
    return run_shell_with_build_image_checks(verbose, dry_run,
                                             enter_shell_params)
Example #28
File: cache.py  Project: wkodate/airflow
def write_to_cache_file(param_name: str,
                        param_value: str,
                        check_allowed_values: bool = True) -> None:
    """
    Writes the value to the cache. If asked, it can also check whether the value is allowed for the parameter
    and, if it is not, exit instead of writing it.
    :param param_name: name of the parameter
    :param param_value: new value for the parameter
    :param check_allowed_values: whether to fail if the parameter value is not allowed for that name.
    """
    allowed = False
    allowed_values = None
    if check_allowed_values:
        allowed, allowed_values = check_if_values_allowed(
            param_name, param_value)
    if allowed or not check_allowed_values:
        cache_path = Path(BUILD_CACHE_DIR, f".{param_name}")
        cache_path.parent.mkdir(parents=True, exist_ok=True)
        cache_path.write_text(param_value)
    else:
        console.print(
            f'[cyan]You have sent the {param_value} for {param_name}')
        console.print(
            f'[cyan]Allowed values for {param_name} are {allowed_values}')
        console.print(
            '[cyan]Provide one of the supported params. Write to cache dir failed'
        )
        sys.exit(1)
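The matching read side (used e.g. in Example #27 as `read_from_cache_file`) could look like this minimal sketch, assuming the same `.{param_name}` file layout under BUILD_CACHE_DIR:

from pathlib import Path
from typing import Optional

def read_from_cache_file(param_name: str) -> Optional[str]:
    # Hypothetical sketch: return the cached value, or None if nothing was cached yet.
    cache_path = Path(BUILD_CACHE_DIR, f".{param_name}")
    if not cache_path.exists():
        return None
    return cache_path.read_text().strip()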
Example #29
def check_docker_version(verbose: bool):
    """
    Checks if the docker version is as expected (including some specific modifications done by
    some vendors such as Microsoft, who might have a modified version of docker/docker-compose in their
    cloud). In case the docker version is wrong, we continue but print a warning for the user.

    :param verbose: print commands when running
    """
    permission_denied = check_docker_permission(verbose)
    if not permission_denied:
        docker_version_command = [
            'docker', 'version', '--format', '{{.Client.Version}}'
        ]
        docker_version = ''
        docker_version_output = run_command(
            docker_version_command,
            verbose=verbose,
            no_output_dump_on_exception=True,
            capture_output=True,
            text=True,
        )
        if docker_version_output.returncode == 0:
            docker_version = docker_version_output.stdout.strip()
        if docker_version == '':
            console.print(
                'Your version of docker is unknown. If the scripts fail, please make sure to '
                f'install docker in at least version {MIN_DOCKER_VERSION}.')
        else:
            good_version = compare_version(docker_version, MIN_DOCKER_VERSION)
            if good_version:
                console.print(f'Good version of Docker: {docker_version}.')
            else:
                console.print(
                    f'Your version of docker is too old: {docker_version}. Please upgrade to '
                    f'at least version {MIN_DOCKER_VERSION}.')
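The `compare_version` helper is not shown in this listing; a minimal sketch using `packaging.version` (the library choice is an assumption):

from packaging.version import Version

def compare_version(current_version: str, min_version: str) -> bool:
    # Hypothetical sketch: True when the installed version satisfies the minimum.
    return Version(current_version) >= Version(min_version)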
Example #30
def print_async_summary(completed_list: List[ApplyResult]) -> None:
    """
    Print summary of completed async results.
    :param completed_list: list of completed async results.
    """
    completed_list.sort(key=lambda x: x.get()[1])
    console.print()
    for result in completed_list:
        return_code, info = result.get()
        if return_code != 0:
            console.print(
                f"[red]NOK[/] for {info}: Return code: {return_code}.")
        else:
            console.print(f"[green]OK [/] for {info}.")
    console.print()
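A usage sketch tying this together with the parallel pulls of Example #26; run_pull_image, its argument order, and image_params_list are taken from that example, while the pool size is illustrative:

import multiprocessing as mp

# Illustrative: schedule pulls in a pool, wait for completion, then summarize the results.
with mp.Pool(4) as pool:
    results = [
        pool.apply_async(run_pull_image, args=(params, False, True, True, True, 10.0))
        for params in image_params_list
    ]
    pool.close()
    pool.join()
    print_async_summary(results)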