def run_command(
    cmd: List[str],
    *,
    check: bool = True,
    verbose: bool = False,
    suppress_raise_exception: bool = False,
    suppress_console_print: bool = False,
    **kwargs,
):
    if verbose:
        console.print(f"[blue]$ {' '.join(shlex.quote(c) for c in cmd)}")
    try:
        return subprocess.run(cmd, check=check, **kwargs)
    except subprocess.CalledProcessError as ex:
        if not suppress_console_print:
            console.print("========================= OUTPUT start ============================")
            console.print(ex.stderr)
            console.print(ex.stdout)
            console.print("========================= OUTPUT end ============================")
        if not suppress_raise_exception:
            raise
def check_buildx_plugin_build_command(self):
    build_command_param = []
    is_buildx_available = check_if_buildx_plugin_available(True)
    if is_buildx_available:
        if self.prepare_buildx_cache:
            build_command_param.extend(
                ["buildx", "build", "--builder", "airflow_cache", "--progress=tty"]
            )
            cmd = ['docker', 'buildx', 'inspect', 'airflow_cache']
            # check=False so that a missing builder does not raise - we inspect the
            # return code and create the builder ourselves when it does not exist yet.
            output = run_command(cmd, verbose=True, check=False, text=True)
            if output.returncode != 0:
                next_cmd = ['docker', 'buildx', 'create', '--name', 'airflow_cache']
                run_command(next_cmd, verbose=True, text=True)
        else:
            build_command_param.extend(
                ["buildx", "build", "--builder", "default", "--progress=tty"]
            )
    else:
        if self.prepare_buildx_cache:
            console.print(
                '\n[red] Buildx cli plugin is not available and you need it to prepare buildx cache. \n'
            )
            console.print(
                '[red] Please install it following https://docs.docker.com/buildx/working-with-buildx/ \n'
            )
            sys.exit()
        build_command_param.append("build")
    return build_command_param
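# Illustrative note (an assumption, not part of the module): the list returned by
# check_buildx_plugin_build_command() is only the sub-command prefix; the caller is
# expected to prepend "docker", so the resulting invocations look roughly like:
#   with prepare_buildx_cache:      docker buildx build --builder airflow_cache --progress=tty ...
#   buildx present, no cache prep:  docker buildx build --builder default --progress=tty ...
#   buildx plugin not installed:    docker build ...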
def build_production_image(verbose, **kwargs):
    parameters_passed = filter_out_none(**kwargs)
    prod_params = get_image_build_params(parameters_passed)
    prod_params.print_info()
    if prod_params.cleanup_docker_context_files:
        clean_docker_context_files()
    check_docker_context_files(prod_params.install_docker_context_files)
    if prod_params.skip_building_prod_image:
        console.print('[bright_yellow]\nSkip building production image. Assume the one we have is good!')
        console.print('[bright_yellow]\nYou must run Breeze2 build-prod-image before for all python versions!')
    if prod_params.prepare_buildx_cache:
        login_to_docker_registry(prod_params)
    cmd = construct_docker_command(prod_params)
    run_command(
        ["docker", "rmi", "--no-prune", "--force", prod_params.airflow_prod_image_name],
        verbose=verbose,
        cwd=AIRFLOW_SOURCE,
        text=True,
        suppress_raise_exception=True,
    )
    run_command(cmd, verbose=verbose, cwd=AIRFLOW_SOURCE, text=True)
    if prod_params.prepare_buildx_cache:
        run_command(
            ['docker', 'push', prod_params.airflow_prod_image_name], verbose=True, text=True
        )
def instruct_build_image(the_image_type: str, python_version: str):
    console.print(f'\nThe {the_image_type} image for python version {python_version} may be outdated\n')
    console.print('Please run this command at your earliest convenience:\n')
    if the_image_type == 'CI':
        console.print(f'./Breeze2 build-ci-image --python {python_version}')
    else:
        console.print(f'./Breeze2 build-prod-image --python {python_version}')
    console.print("\nIf you run it via pre-commit as individual hook, you can run 'pre-commit run build'.\n")
def build_image(verbose, **kwargs):
    ci_image_params = BuildParams(filter_out_none(**kwargs))
    is_cached, value = check_cache_and_write_if_not_cached(
        "PYTHON_MAJOR_MINOR_VERSION", ci_image_params.python_version
    )
    if is_cached:
        ci_image_params.python_version = value
    cmd = construct_docker_command(ci_image_params)
    output = run_command(cmd, verbose=verbose, text=True)
    console.print(f"[blue]{output}")
def build_shell(verbose, **kwargs):
    check_docker_version(verbose)
    check_docker_compose_version(verbose)
    updated_kwargs = get_cached_params(kwargs)
    if read_from_cache_file('suppress_asciiart') is None:
        console.print(ASCIIART, style=ASCIIART_STYLE)
    if read_from_cache_file('suppress_cheatsheet') is None:
        console.print(CHEATSHEET, style=CHEATSHEET_STYLE)
    enter_shell_params = ShellBuilder(**filter_out_none(**updated_kwargs))
    build_image_checks(verbose, enter_shell_params)
def check_package_installed(package_name: str) -> bool:
    is_installed = False
    if shutil.which(package_name) is not None:
        is_installed = True
        console.print(f"\n[blue]Package name {package_name} is installed to run static check test")
    else:
        console.print(
            f"\n[red]Error: Package name {package_name} is not installed. "
            "Please install using https://pre-commit.com/#install to continue[/]\n"
        )
    return is_installed
def find_airflow_sources_root():
    # Try to find airflow sources in current working dir
    airflow_sources_root = search_upwards_for_airflow_sources_root(Path.cwd())
    if not airflow_sources_root:
        # Or if it fails, find it in parents of the directory where the ./breeze.py is.
        airflow_sources_root = search_upwards_for_airflow_sources_root(Path(__file__).resolve().parent)
    global __AIRFLOW_SOURCES_ROOT
    if airflow_sources_root:
        __AIRFLOW_SOURCES_ROOT = airflow_sources_root
    else:
        console.print(f"\n[yellow]Could not find Airflow sources location. Assuming {__AIRFLOW_SOURCES_ROOT}")
    os.chdir(__AIRFLOW_SOURCES_ROOT)
def build_ci_image(
    verbose: bool,
    additional_extras: Optional[str],
    python: str,
    additional_dev_apt_deps: Optional[str],
    additional_runtime_apt_deps: Optional[str],
    additional_python_deps: Optional[str],
    additional_dev_apt_command: Optional[str],
    additional_runtime_apt_command: Optional[str],
    additional_dev_apt_env: Optional[str],
    additional_runtime_apt_env: Optional[str],
    dev_apt_command: Optional[str],
    dev_apt_deps: Optional[str],
    runtime_apt_command: Optional[str],
    runtime_apt_deps: Optional[str],
    github_repository: Optional[str],
    build_cache: Optional[str],
    platform: Optional[str],
    debian_version: Optional[str],
    prepare_buildx_cache: bool,
    ci: bool,
    upgrade_to_newer_dependencies: str = "false",
):
    """Builds docker CI image without entering the container."""
    if verbose:
        console.print(
            f"\n[blue]Building image of airflow from {__AIRFLOW_SOURCES_ROOT} "
            f"python version: {python}[/]\n"
        )
    create_directories()
    build_image(
        verbose,
        additional_extras=additional_extras,
        python_version=python,
        additional_dev_apt_deps=additional_dev_apt_deps,
        additional_runtime_apt_deps=additional_runtime_apt_deps,
        additional_python_deps=additional_python_deps,
        additional_runtime_apt_command=additional_runtime_apt_command,
        additional_dev_apt_command=additional_dev_apt_command,
        additional_dev_apt_env=additional_dev_apt_env,
        additional_runtime_apt_env=additional_runtime_apt_env,
        dev_apt_command=dev_apt_command,
        dev_apt_deps=dev_apt_deps,
        runtime_apt_command=runtime_apt_command,
        runtime_apt_deps=runtime_apt_deps,
        github_repository=github_repository,
        docker_cache=build_cache,
        platform=platform,
        debian_version=debian_version,
        prepare_buildx_cache=prepare_buildx_cache,
        ci=ci,
        upgrade_to_newer_dependencies=upgrade_to_newer_dependencies,
    )
def build_image(verbose, **kwargs):
    parameters_passed = filter_out_none(**kwargs)
    ci_image_params = get_image_build_params(parameters_passed)
    ci_image_cache_dir = Path(BUILD_CACHE_DIR, ci_image_params.airflow_branch)
    ci_image_cache_dir.mkdir(parents=True, exist_ok=True)
    touch_cache_file(f"built_{ci_image_params.python_version}", root_dir=ci_image_cache_dir)
    cmd = construct_docker_command(ci_image_params)
    output = run_command(cmd, verbose=verbose, cwd=AIRFLOW_SOURCE, text=True)
    console.print(f"[blue]{output}")
def extra_docker_build_flags(self) -> List[str]:
    extra_build_flags = []
    if len(self.install_airflow_reference) > 0:
        airflow_installation_method = (
            "https://github.com/apache/airflow/archive/"
            + self.install_airflow_reference
            + ".tar.gz#egg=apache-airflow"
        )
        extra_build_flags.extend(
            ["--build-arg", f"AIRFLOW_INSTALLATION_METHOD={airflow_installation_method}"]
        )
        extra_build_flags.extend(self.args_for_remote_install)
        self.airflow_version = self.install_airflow_reference
    elif len(self.install_airflow_version) > 0:
        if not re.match(r'^[0-9\.]+((a|b|rc|alpha|beta|pre)[0-9]+)?$', self.install_airflow_version):
            console.print(
                f'\n[red]ERROR: Bad value for install-airflow-version: {self.install_airflow_version}'
            )
            console.print('[red]Only numerical versions allowed for PROD image here!')
            sys.exit()
        extra_build_flags.extend(["--build-arg", "AIRFLOW_INSTALLATION_METHOD=apache-airflow"])
        extra_build_flags.extend(
            ["--build-arg", f"AIRFLOW_VERSION_SPECIFICATION==={self.install_airflow_version}"]
        )
        extra_build_flags.extend(["--build-arg", f"AIRFLOW_VERSION={self.install_airflow_version}"])
        extra_build_flags.extend(self.args_for_remote_install)
        self.airflow_version = self.install_airflow_version
    else:
        extra_build_flags.extend(
            [
                "--build-arg",
                f"AIRFLOW_SOURCES_FROM={AIRFLOW_SOURCES_FROM}",
                "--build-arg",
                f"AIRFLOW_SOURCES_TO={AIRFLOW_SOURCES_TO}",
                "--build-arg",
                f"AIRFLOW_SOURCES_WWW_FROM={AIRFLOW_SOURCES_WWW_FROM}",
                "--build-arg",
                f"AIRFLOW_SOURCES_WWW_TO={AIRFLOW_SOURCES_WWW_TO}",
                "--build-arg",
                f"AIRFLOW_INSTALLATION_METHOD={self.installation_method}",
                "--build-arg",
                f"AIRFLOW_CONSTRAINTS_REFERENCE={self.default_constraints_branch}",
            ]
        )
    return extra_build_flags
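# Hypothetical example (not in the source): for install_airflow_version = "2.2.4"
# the property above would return roughly the following flags, which are later
# spliced into the `docker build` command line:
#   ["--build-arg", "AIRFLOW_INSTALLATION_METHOD=apache-airflow",
#    "--build-arg", "AIRFLOW_VERSION_SPECIFICATION===2.2.4",
#    "--build-arg", "AIRFLOW_VERSION=2.2.4",
#    *self.args_for_remote_install]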
def change_config(python, backend, cheatsheet, asciiart):
    """Toggles on/off cheatsheet, asciiart."""
    from airflow_breeze.cache import delete_cache, touch_cache_file, write_to_cache_file

    if asciiart:
        console.print('[blue] ASCIIART enabled')
        delete_cache('suppress_asciiart')
    elif asciiart is not None:
        touch_cache_file('suppress_asciiart')
    if cheatsheet:
        console.print('[blue] Cheatsheet enabled')
        delete_cache('suppress_cheatsheet')
    elif cheatsheet is not None:
        touch_cache_file('suppress_cheatsheet')
    if python is not None:
        write_to_cache_file('PYTHON_MAJOR_MINOR_VERSION', python)
        console.print(f'[blue]Python cached_value {python}')
    if backend is not None:
        write_to_cache_file('BACKEND', backend)
        console.print(f'[blue]Backend cached_value {backend}')
def fix_group_permissions():
    console.print("[blue]Fixing group permissions[/]")
    files_to_fix_result = run_command(['git', 'ls-files', './'], capture_output=True, text=True)
    if files_to_fix_result.returncode == 0:
        files_to_fix = files_to_fix_result.stdout.strip().split('\n')
        for file_to_fix in files_to_fix:
            change_file_permission(Path(file_to_fix))
    directories_to_fix_result = run_command(
        ['git', 'ls-tree', '-r', '-d', '--name-only', 'HEAD'], capture_output=True, text=True
    )
    if directories_to_fix_result.returncode == 0:
        directories_to_fix = directories_to_fix_result.stdout.strip().split('\n')
        for directory_to_fix in directories_to_fix:
            change_directory_permission(Path(directory_to_fix))
def instruct_for_setup():
    CMDNAME = 'Breeze2'
    console.print(f"\nYou can setup autocomplete by running '{CMDNAME} setup-autocomplete'")
    console.print("  You can toggle ascii/cheatsheet by running:")
    console.print(f"  * {CMDNAME} toggle-suppress-cheatsheet")
    console.print(f"  * {CMDNAME} toggle-suppress-asciiart\n")
def shell(
    verbose: bool,
    python: str,
    backend: str,
    integration: Tuple[str],
    build_cache_local: bool,
    build_cache_pulled: bool,
    build_cache_disabled: bool,
    postgres_version: str,
    mysql_version: str,
    mssql_version: str,
    executor: str,
    forward_credentials: bool,
    skip_mounting_local_sources: bool,
    use_airflow_version: str,
    use_packages_from_dist: bool,
    force_build: bool,
    extra_args: Tuple,
):
    """Enters breeze.py environment. This is the default command used when no other is selected."""
    if verbose:
        console.print("\n[green]Welcome to breeze.py[/]\n")
        console.print(f"\n[green]Root of Airflow Sources = {__AIRFLOW_SOURCES_ROOT}[/]\n")
    build_shell(
        verbose,
        python_version=python,
        backend=backend,
        integration=integration,
        build_cache_local=build_cache_local,
        build_cache_disabled=build_cache_disabled,
        build_cache_pulled=build_cache_pulled,
        postgres_version=postgres_version,
        mysql_version=mysql_version,
        mssql_version=mssql_version,
        executor=executor,
        forward_credentials=str(forward_credentials),
        skip_mounting_local_sources=skip_mounting_local_sources,
        use_airflow_version=use_airflow_version,
        use_packages_from_dist=use_packages_from_dist,
        force_build=force_build,
        extra_args=extra_args,
    )
def build_ci_image(
    verbose: bool,
    additional_extras: Optional[str],
    python: Optional[str],
    additional_dev_apt_deps: Optional[str],
    additional_runtime_apt_deps: Optional[str],
    additional_python_deps: Optional[str],
    additional_dev_apt_command: Optional[str],
    additional_runtime_apt_command: Optional[str],
    additional_dev_apt_env: Optional[str],
    additional_runtime_apt_env: Optional[str],
    dev_apt_command: Optional[str],
    dev_apt_deps: Optional[str],
    runtime_apt_command: Optional[str],
    runtime_apt_deps: Optional[str],
    github_repository: Optional[str],
    build_cache: Optional[str],
    upgrade_to_newer_dependencies: bool,
):
    """Builds docker CI image without entering the container."""
    if verbose:
        console.print(f"\n[blue]Building image of airflow from {__AIRFLOW_SOURCES_ROOT}[/]\n")
    build_image(
        verbose,
        additional_extras=additional_extras,
        python_version=python,
        additional_dev_apt_deps=additional_dev_apt_deps,
        additional_runtime_apt_deps=additional_runtime_apt_deps,
        additional_python_deps=additional_python_deps,
        additional_runtime_apt_command=additional_runtime_apt_command,
        additional_dev_apt_command=additional_dev_apt_command,
        additional_dev_apt_env=additional_dev_apt_env,
        additional_runtime_apt_env=additional_runtime_apt_env,
        dev_apt_command=dev_apt_command,
        dev_apt_deps=dev_apt_deps,
        runtime_apt_command=runtime_apt_command,
        runtime_apt_deps=runtime_apt_deps,
        github_repository=github_repository,
        docker_cache=build_cache,
        upgrade_to_newer_dependencies=str(upgrade_to_newer_dependencies).lower(),
    )
def docker_cache_prod_directive(self) -> List:
    docker_cache_prod_directive = []
    if self.docker_cache == "pulled":
        docker_cache_prod_directive.append(f"--cache-from={self.airflow_prod_image_name}")
    elif self.docker_cache == "disabled":
        docker_cache_prod_directive.append("--no-cache")
    if self.prepare_buildx_cache:
        docker_cache_prod_directive.extend(["--cache-to=type=inline,mode=max", "--push"])
        if is_multi_platform(self.platform):
            console.print("\nSkip loading docker image on multi-platform build")
        else:
            docker_cache_prod_directive.extend(["--load"])
    return docker_cache_prod_directive
def shell(verbose: bool):
    """Enters breeze.py environment. This is the default command used when no other is selected."""
    from airflow_breeze.cache import read_from_cache_file

    if verbose:
        console.print("\n[green]Welcome to breeze.py[/]\n")
        console.print(f"\n[green]Root of Airflow Sources = {__AIRFLOW_SOURCES_ROOT}[/]\n")
    if read_from_cache_file('suppress_asciiart') is None:
        console.print(ASCIIART, style=ASCIIART_STYLE)
    if read_from_cache_file('suppress_cheatsheet') is None:
        console.print(CHEATSHEET, style=CHEATSHEET_STYLE)
    raise ClickException("\nPlease implement entering breeze.py\n")
def login_to_docker_registry(build_params: ProdParams):
    if build_params.ci == "true":
        if len(build_params.github_token) == 0:
            console.print("\n[blue]Skip logging in to GitHub Registry. No Token available!")
        elif build_params.airflow_login_to_github_registry != "true":
            console.print(
                "\n[blue]Skip logging in to GitHub Registry. "
                "AIRFLOW_LOGIN_TO_GITHUB_REGISTRY is set as false"
            )
        elif len(build_params.github_token) > 0:
            run_command(['docker', 'logout', 'ghcr.io'], verbose=True, text=True)
            run_command(
                [
                    'docker',
                    'login',
                    '--username',
                    build_params.github_username,
                    '--password-stdin',
                    'ghcr.io',
                ],
                verbose=True,
                text=True,
                input=build_params.github_token,
            )
        else:
            console.print('\n[blue]Skip Login to GitHub Container Registry as token is missing')
def build_image_checks(verbose: bool, shell_params: ShellBuilder):
    fix_group_permissions()
    build_ci_image_check_cache = Path(
        BUILD_CACHE_DIR, shell_params.airflow_branch, f".built_{shell_params.python_version}"
    )
    if build_ci_image_check_cache.exists():
        console.print(f'{shell_params.the_image_type} image already built locally.')
    else:
        console.print(f'{shell_params.the_image_type} image not built locally')
    if not shell_params.force_build:
        build_image_if_needed_steps(verbose, shell_params)
    else:
        build_image(
            verbose,
            python_version=shell_params.python_version,
            upgrade_to_newer_dependencies="false",
        )
    instruct_for_setup()
    check_docker_resources(verbose, str(shell_params.airflow_sources), shell_params.airflow_ci_image_name)
    cmd = ['docker-compose', 'run', '--service-ports', '--rm', 'airflow']
    cmd_added = shell_params.command_passed
    env_variables = construct_env_variables_docker_compose_command(shell_params)
    if cmd_added is not None:
        cmd.extend(['-c', cmd_added])
    if verbose:
        shell_params.print_badge_info()
    output = run_command(cmd, verbose=verbose, env=env_variables, text=True)
    if verbose:
        console.print(f"[blue]{output}[/]")
def shell(verbose: bool):
    """Enters breeze.py environment. This is the default command used when no other is selected."""
    if verbose:
        console.print("\n[green]Welcome to breeze.py[/]\n")
        console.print(f"\n[green]Root of Airflow Sources = {__AIRFLOW_SOURCES_ROOT}[/]\n")
    console.print(ASCIIART, style=ASCIIART_STYLE)
    raise ClickException("\nPlease implement entering breeze.py\n")
def md5sum_check_if_build_is_needed(md5sum_cache_dir: Path, the_image_type: str) -> bool:
    build_needed = False
    modified_files, not_modified_files = calculate_md5_checksum_for_files(md5sum_cache_dir)
    if len(modified_files) > 0:
        console.print('The following files are modified: ', modified_files)
        console.print(f'Likely {the_image_type} image needs rebuild')
        build_needed = True
    else:
        console.print(
            f'Docker image build is not needed for {the_image_type} build as no important files are changed!'
        )
    return build_needed
def write_to_cache_file(param_name: str, param_value: str, check_allowed_values: bool = True) -> None:
    allowed = False
    if check_allowed_values:
        allowed, allowed_values = check_if_values_allowed(param_name, param_value)
    if allowed or not check_allowed_values:
        print('BUILD CACHE DIR:', BUILD_CACHE_DIR)
        Path(BUILD_CACHE_DIR, f".{param_name}").write_text(param_value)
    else:
        console.print(f'[cyan]You have sent the {param_value} for {param_name}')
        console.print(f'[cyan]Allowed values for the {param_name} are {allowed_values}')
        console.print('[cyan]Provide one of the supported params. Write to cache dir failed')
        sys.exit()
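# A minimal usage sketch (an assumption, not part of the module) showing how the cache
# helpers fit together: write_to_cache_file() stores each value as a dot-file under
# BUILD_CACHE_DIR, and the value can later be read back from the same location.
def _cache_round_trip_example():
    write_to_cache_file('BACKEND', 'postgres')
    # The value now lives in BUILD_CACHE_DIR/.BACKEND:
    cached_backend = Path(BUILD_CACHE_DIR, '.BACKEND').read_text().strip()
    console.print(f'[blue]Cached backend: {cached_backend}')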
def version():
    """Prints version of breeze.py."""
    console.print(ASCIIART, style=ASCIIART_STYLE)
    console.print(f"\n[green]{NAME} version: {VERSION}[/]\n")
def start_airflow(verbose: bool):
    """Enters breeze.py environment and sets up the tmux session."""
    if verbose:
        console.print("\n[green]Welcome to breeze.py[/]\n")
    console.print(ASCIIART, style=ASCIIART_STYLE)
    raise ClickException("\nPlease implement entering breeze.py\n")
def build_prod_image(verbose: bool):
    """Builds docker Production image without entering the container."""
    if verbose:
        console.print("\n[blue]Building image[/]\n")
    raise ClickException("\nPlease implement building the Production image\n")
def print_info(self):
    console.print(
        f"Airflow {self.airflow_version} Python: {self.python_version}. "
        f"Image description: {self.image_description}"
    )
def run_command(
    cmd: List[str],
    *,
    check: bool = True,
    verbose: bool = False,
    suppress_raise_exception: bool = False,
    suppress_console_print: bool = False,
    env: Optional[Mapping[str, str]] = None,
    cwd: Optional[Path] = None,
    **kwargs,
):
    workdir: str = str(cwd) if cwd else os.getcwd()
    if verbose:
        command_to_print = ' '.join(shlex.quote(c) for c in cmd)
        # If we pass environment variables to execute, print them as well
        env_to_print = ' '.join(f'{key}="{val}"' for (key, val) in env.items()) if env else ''
        console.print(f"\n[blue]Working directory {workdir} [/]\n")
        # Soft wrap allows to copy&paste and run resulting output as it has no hard EOL
        console.print(f"\n[blue]{env_to_print} {command_to_print}[/]\n", soft_wrap=True)
    try:
        # Copy existing environment variables
        cmd_env = deepcopy(os.environ)
        if env:
            # Add environment variables passed as parameters
            cmd_env.update(env)
        return subprocess.run(cmd, check=check, env=cmd_env, cwd=workdir, **kwargs)
    except subprocess.CalledProcessError as ex:
        if not suppress_console_print:
            console.print("========================= OUTPUT start ============================")
            console.print(ex.stderr)
            console.print(ex.stdout)
            console.print("========================= OUTPUT end ============================")
        if not suppress_raise_exception:
            raise
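# A minimal usage sketch (an assumption, not part of the module) of the run_command()
# helper above; the commands and environment values below are illustrative only.
def _run_command_usage_example():
    # Default behaviour: check=True, so a failing command raises CalledProcessError
    # (after the captured output is printed by the except block).
    run_command(['docker', 'version'], verbose=True, text=True)
    # Tolerant invocation: check=False returns the CompletedProcess so the caller
    # can inspect the return code instead of handling an exception.
    result = run_command(['docker', 'info'], verbose=True, text=True, check=False)
    if result.returncode != 0:
        console.print('[yellow]Docker does not seem to be running')
    # Extra environment variables are merged on top of os.environ before execution,
    # and cwd changes the working directory for the child process only.
    run_command(['env'], verbose=True, text=True, env={'EXAMPLE_VAR': 'value'}, cwd=Path('.'))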
def build_image_if_needed_steps(verbose: bool, shell_params: ShellBuilder):
    build_needed = md5sum_check_if_build_is_needed(
        shell_params.md5sum_cache_dir, shell_params.the_image_type
    )
    if build_needed:
        try:
            user_status = inputimeout(
                prompt='\nDo you want to build the image? Press y/n/q in 5 seconds\n',
                timeout=5,
            )
            if user_status == 'y':
                latest_sha = get_latest_sha(shell_params.github_repository, shell_params.airflow_branch)
                if is_repo_rebased(latest_sha):
                    build_image(
                        verbose,
                        python_version=shell_params.python_version,
                        upgrade_to_newer_dependencies="false",
                    )
                else:
                    if click.confirm(
                        "\nThis might take a lot of time, we think you should rebase first. "
                        "But if you really, really want - you can do it\n"
                    ):
                        build_image(
                            verbose,
                            python_version=shell_params.python_version,
                            upgrade_to_newer_dependencies="false",
                        )
                    else:
                        console.print(
                            '\nPlease rebase your code before continuing. '
                            'Check this link to know more: '
                            'https://github.com/apache/airflow/blob/main/CONTRIBUTING.rst#id15\n'
                        )
                        console.print('Exiting the process')
                        sys.exit()
            elif user_status == 'n':
                instruct_build_image(shell_params.the_image_type, shell_params.python_version)
            elif user_status == 'q':
                console.print('\nQuitting the process')
                sys.exit()
            else:
                console.print('\nYou have given a wrong choice:', user_status, ' Quitting the process')
                sys.exit()
        except TimeoutOccurred:
            console.print('\nTimeout. Considering your response as No\n')
            instruct_build_image(shell_params.the_image_type, shell_params.python_version)
        except Exception:
            console.print('\nTerminating the process')
            sys.exit()
def print_badge_info(self):
    console.print(f'Use {self.the_image_type} image')
    console.print(f'Branch Name: {self.airflow_branch}')
    console.print(f'Docker Image: {self.airflow_ci_image_name_with_tag}')
    console.print(f'Airflow source version: {self.airflow_version}')
    console.print(f'Python Version: {self.python_version}')
    console.print(f'Backend: {self.backend} {self.backend_version}')
    console.print(f'Airflow used at runtime: {self.use_airflow_version}')