def check_installed_software() -> None:
    """Verify that the docker toolchain required by the controller is available."""

    log.debug(
        "python version: {}.{}.{}",
        sys.version_info.major,
        sys.version_info.minor,
        sys.version_info.micro,
    )

    # 17.05 added support for multi-stage builds
    # https://docs.docker.com/compose/compose-file/compose-file-v3/#compose-and-docker-compatibility-matrix
    # 18.09.2 fixed the CVE-2019-5736 vulnerability
    # 20.10.0 introduced copy --chmod and improved logging
    Packages.check_program(
        "docker", min_version="20.10.0", min_recommended_version="20.10.0"
    )

    if not docker.compose.is_installed():  # pragma: no cover
        print_and_exit(
            "A mandatory dependency is missing: docker compose not found"
            "\nInstallation guide: "
            "https://docs.docker.com/compose/cli-command/#installing-compose-v2"
            "\nor try the automated installation with {command}",
            command=RED("rapydo install compose"),
        )

    # Retrieving the exact version is skipped on purpose: it is too slow
    # to verify the version on every command (up to a couple of seconds)
    # v = docker.compose.version()
    # log.debug("docker compose is installed: {}", v)
    log.debug("docker compose is installed")
def ping(self, do_exit: bool = True) -> bool:
    """Check whether the registry answers on its TCP port.

    Returns True when reachable; otherwise exits (default) or returns False.
    """
    registry_host = Application.env["REGISTRY_HOST"]
    registry_port = int(Application.env.get("REGISTRY_PORT", "5000") or "5000")

    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
        sock.settimeout(1)
        try:
            outcome = sock.connect_ex((registry_host, registry_port))
        except socket.gaierror:
            # The error is not important, let's use a generic -1
            # result = errno.ESRCH
            outcome = -1

    if outcome == 0:
        return True

    if do_exit:
        print_and_exit(
            "Registry {} not reachable. You can start it with {command}",
            self.get_host(),
            command=RED("rapydo run registry"),
        )

    return False
def backup(
    container: Optional[Tuple[str, str]], now: datetime, force: bool, dry_run: bool
) -> None:
    """Back up RabbitMQ by archiving its mnesia folder into /backup.

    The service is stopped for the duration of the backup when running,
    which is why --force is required while the container is up.
    """
    if container and not force:
        print_and_exit(
            "RabbitMQ is running and the backup will temporary stop it. "
            "If you want to continue add --force flag"
        )

    docker = Docker()

    if container and not dry_run:
        docker.remove(SERVICE_NAME)

    backup_path = f"/backup/{SERVICE_NAME}/{now}.tar.gz"

    log.info("Starting backup on {}...", SERVICE_NAME)

    if not dry_run:
        # Archive the mnesia folder, then verify the gz integrity
        log.info("Executing rabbitmq mnesia...")
        docker.compose.create_volatile_container(
            SERVICE_NAME,
            command=f"tar -zcf {backup_path} -C /var/lib/rabbitmq mnesia",
        )

        log.info("Verifying the integrity of the backup file...")
        docker.compose.create_volatile_container(
            SERVICE_NAME, command=f"gzip -t {backup_path}"
        )

    log.info("Backup completed: data{}", backup_path)

    if container and not dry_run:
        docker.start(SERVICE_NAME)
def restore(
    container: Optional[Tuple[str, str]], backup_file: str, force: bool
) -> None:
    """Restore a compressed pg_dumpall backup into the running postgres service."""
    if not container:
        print_and_exit(
            "The restore procedure requires {} running, please start your stack",
            SERVICE_NAME,
        )

    docker = Docker()

    log.info("Starting restore on {}...", SERVICE_NAME)

    backup_path = f"/backup/{SERVICE_NAME}/{backup_file}"
    dump_path = f"/tmp/{backup_file.replace('.gz', '')}"

    # Copy the compressed dump into /tmp and decompress it there
    docker.exec_command(container, user="******", command=f"cp {backup_path} /tmp/")
    docker.exec_command(
        container, user="******", command=f"gunzip -kf /tmp/{backup_file}"
    )

    # Executed as root
    docker.exec_command(container, user="******", command=f"chown postgres {dump_path}")

    # By using pg_dumpall the resulting dump can be restored with psql:
    docker.exec_command(
        container,
        user="******",
        command=f"psql -U sqluser -f {dump_path} postgres",
    )

    log.info("Restore from data{} completed", backup_path)
def create_template(
    templating: Templating,
    template_name: str,
    target_path: Path,
    name: str,
    services: List[str],
    auth: str,
    force: bool,
    project: str,
) -> None:
    """Render a project template and write it to target_path.

    Refuses to overwrite an existing file unless force is set.
    """
    if not force and target_path.exists():
        print_and_exit("{} already exists", target_path)

    context = {
        "name": name,
        "services": services,
        "auth_service": auth,
        "project": project,
    }
    rendered = templating.get_template(template_name, context)
    templating.save_template(target_path, rendered, force=force)
def get_expired_passwords() -> List[Tuple[str, datetime]]:
    """Return (variable, expiration_date) pairs for passwords past expiration.

    Only services that are active (or the registry in swarm mode) are
    considered; the change dates come from the .projectrc file.
    """
    expired: List[Tuple[str, datetime]] = []

    last_updates = parse_projectrc()
    now = datetime.now()

    for service, module in PASSWORD_MODULES.items():
        # This should never happens and can't be (easily) tested
        if service not in Application.data.base_services:  # pragma: no cover
            print_and_exit("Command misconfiguration, unknown {} service", service)

        if service != REGISTRY and service not in Application.data.active_services:
            continue

        if service == REGISTRY and not Configuration.swarm_mode:
            continue

        if not module:  # pragma: no cover
            print_and_exit(f"{service} misconfiguration, module not found")

        for variable in module.PASSWORD_VARIABLES:
            if variable not in last_updates:
                continue

            change_date = last_updates.get(variable, datetime.fromtimestamp(0))
            expiration_date = change_date + timedelta(days=PASSWORD_EXPIRATION)
            if now > expiration_date:
                expired.append((variable, expiration_date))

    return expired
def get_services(
    services: Optional[Union[str, Iterable[str]]],
    default: List[str],
) -> List[str]:
    """Normalize a service selection into a sorted list of names.

    Accepts None (fall back to default), a comma-separated string
    (deprecated -s syntax) or an iterable of names. A name prefixed
    with an underscore excludes that service from the selection.
    """
    if not services:
        selection = sorted(default)
    elif isinstance(services, str):
        warnings.warn("Deprecated use of -s option")
        selection = sorted(services.split(","))
    else:
        selection = sorted(services)

    # _foo entries are exclusion markers for service foo
    excluded = [name[1:] for name in selection if name.startswith("_")]

    if not excluded:
        return selection

    # Filter out _ services from the selection
    selection = [name for name in selection if not name.startswith("_")]

    for name in excluded:
        if name not in selection:
            print_and_exit("No such service: {}", name)

    return sorted(name for name in selection if name not in excluded)
def verify_rapydo_version(rapydo_version: str = "") -> bool:
    """
    Verify if the installed controller matches the current project requirement
    """
    if not rapydo_version:
        rapydo_version = Configuration.rapydo_version

    if not rapydo_version:  # pragma: no cover
        return True

    required = Version(rapydo_version)
    current = Version(__version__)

    if required == current:
        return True

    # pragma: no cover
    if required > current:
        ac = f"Upgrade your controller to version {required}"
    else:
        ac = f"Downgrade your controller to version {required} or upgrade your project"

    msg = f"""RAPyDo version is not compatible

This project requires RAPyDo {required} but you are using version {current}

{ac}

You can use of one:
 - rapydo install (install in editable from submodules/do)
 - rapydo install --no-editable (install from pypi)

"""

    print_and_exit(msg)
def read_composer_yamls(config_files: List[Path]) -> Tuple[List[Path], List[Path]]:
    """Validate compose files and split them into (all files, base files).

    Base files are the files loaded from CONFS_DIR; every existing and
    syntactically valid file ends up in the first list.
    """
    base_files: List[Path] = []
    all_files: List[Path] = []

    # YAML CHECK UP
    for path in config_files:
        try:
            # This is to verify that mandatory files exist and yml syntax is valid
            conf = load_yaml_file(file=path, is_optional=False)

            version = conf.get("version")
            if version != COMPOSE_FILE_VERSION:  # pragma: no cover
                log.warning(
                    "Compose file version in {} is {}, expected {}",
                    path,
                    version,
                    COMPOSE_FILE_VERSION,
                )

            if path.exists():
                all_files.append(path)

                # Base files are those loaded from CONFS_DIR
                if CONFS_DIR in path.parents:
                    base_files.append(path)

        except KeyError as e:  # pragma: no cover
            print_and_exit("Error reading {}: {}", path, str(e))

    return all_files, base_files
def main() -> None:
    """Controller entry point: configure warnings, init colorama, run the app."""
    # All imports moved here to prevent to slow down the import of main
    import warnings

    from controller import TESTING, log, print_and_exit

    # Promote warnings to errors while testing, keep defaults otherwise
    if TESTING:
        warnings.filterwarnings("error")
    else:  # pragma: no cover
        warnings.filterwarnings("default")

    from colorama import deinit, init  # type: ignore
    from python_on_whales.utils import DockerException

    from controller.app import Application

    try:
        init()
        Application.load_projectrc()
        application = Application()
        application.app()
    except DockerException as e:  # pragma: no cover
        log.critical("Uncatched exception: {}", type(e))
        print_and_exit(str(e))

    deinit()
def interfaces(
    service: ServiceTypes = typer.Argument(
        ...,
        help="Service name",
    ),
    detach: bool = typer.Option(
        False,
        "--detach",
        help="Detached mode to run the container in background",
        show_default=False,
    ),
    port: Optional[int] = typer.Option(
        None,
        "--port",
        "-p",
        help="port to be associated to the current service interface",
    ),
) -> None:
    """Deprecated command kept only to point users at `rapydo run`."""
    Application.print_command(
        Application.serialize_parameter("--detach", detach, IF=detach),
        Application.serialize_parameter("--port", port, IF=port),
        Application.serialize_parameter("", service),
    )

    # Deprecated since 1.2
    if service.value == "sqlalchemy":
        log.warning("Deprecated interface sqlalchemy, use adminer instead")
        return None

    # Deprecated since 2.1
    print_and_exit("Interfaces command is replaced by rapydo run {}", service)
def scale(
    scaling: str = typer.Argument(..., help="scale SERVICE to NUM_REPLICA")
) -> None:
    """Scale a service to N replicas (SERVICE=NUM or a configured default)."""
    Application.print_command(Application.serialize_parameter("", scaling))
    Application.get_controller().controller_init()

    parts = scaling.split("=")
    if len(parts) == 2:
        service, nreplicas = parts
    else:
        # No explicit NUM: look up DEFAULT_SCALE_<SERVICE> in the env config
        service = scaling
        scale_var = f"DEFAULT_SCALE_{scaling.upper()}"
        nreplicas = glom(
            Configuration.specs, f"variables.env.{scale_var}", default="1"
        )

    if isinstance(nreplicas, str) and not nreplicas.isnumeric():
        print_and_exit("Invalid number of replicas: {}", nreplicas)

    verify_available_images(
        [service],
        Application.data.compose_config,
        Application.data.base_services,
    )

    docker = Docker()
    docker.compose.start_containers([service], scales={service: int(nreplicas)})
def find_templates_build(
    base_services: ComposeServices, include_image: bool = False
) -> BuildInfo:
    """Map each template image to its build path and owning services."""
    templates: BuildInfo = {}

    for template_name, base_service in base_services.items():
        build = base_service.build

        # Services without a build section are skipped unless images are wanted
        if not build and not include_image:
            continue

        image = base_service.image
        if image is None:  # pragma: no cover
            print_and_exit(
                "Template builds must have a name, missing for {}", template_name
            )

        entry = templates.get(image)
        if entry is None:
            templates[image] = {
                "services": [],
                "path": build.context if build else None,
                "service": template_name,
            }
        else:
            # Several services can share an image: keep the highest-priority name
            entry["service"] = name_priority(entry["service"], template_name)

        templates[image]["services"].append(template_name)

    return templates
def install(package: Union[str, Path], editable: bool) -> None:
    """
    Install a python package in editable or normal mode
    """
    try:
        options = ["install", "--upgrade"]

        if editable:
            # Editable installs are placed under ~/.local via --prefix
            options += ["--prefix", "~/.local", "--editable"]
        else:
            options.append("--user")

        # Note: package is a Path if editable, str otherwise
        options.append(str(package))

        output = Packages.execute_command("pip3", options)
        for line in output.split("\n"):
            print(line)

    except Exception as e:  # pragma: no cover
        print_and_exit(str(e))
def tuning(
    service: SupportedServices = typer.Argument(..., help="Service name"),
    cpu: int = typer.Option(None, "--cpu", help="Force the amount of cpus", min=1),
    ram: int = typer.Option(None, "--ram", help="Force the amount of ram", min=1),
) -> None:
    """Print suggested tuning settings for a service given cpu/ram resources."""
    Application.print_command(
        Application.serialize_parameter("--cpu", cpu, IF=cpu),
        Application.serialize_parameter("--ram", ram, IF=ram),
        Application.serialize_parameter("", service),
    )
    Application.get_controller().controller_init()

    # Fall back to the host resources when not forced via options
    cpu = cpu or os.cpu_count() or 1
    if not ram:
        ram = os.sysconf("SC_PAGE_SIZE") * os.sysconf("SC_PHYS_PAGES")

    log.info("Number of CPU(s): {}", cpu)
    log.info("Amount of RAM: {}", system.bytes_to_str(ram))

    log.info("Suggested settings:")

    module = TUNING_MODULES.get(service.value)
    if not module:  # pragma: no cover
        print_and_exit(f"{service.value} misconfiguration, module not found")

    module.tuning(ram, cpu)
def validate_env(env: Dict[str, EnvType]) -> None:
    """Validate env against BaseEnvModel; report each bad field, then exit."""
    try:
        BaseEnvModel(**env)
    except ValidationError as e:
        # Pydantic's str(e) alternates field and message lines after a header,
        # so the odd-indexed lines are the failing field names
        lines = str(e).split("\n")
        for field in lines[1::2]:
            log.error("Invalid value for {}: {}", field, env.get(field, "N/A"))
        print_and_exit(str(e))
def clone(
    url: str, path: Path, branch: str, do: bool = False, check: bool = True
) -> Repo:
    """Open (or clone when do=True) a submodule repo and align it to branch."""
    local_path = SUBMODULES_DIR.joinpath(path)

    if local_path.exists():
        log.debug("Path {} already exists", local_path)
        gitobj = Repo(local_path)
    elif do:
        gitobj = Repo.clone_from(url=url, to_path=local_path)
        log.info("Cloned {}@{} as {}", url, branch, path)
    else:
        print_and_exit(
            "Repo {} missing as {}. You should init your project",
            url,
            local_path,
        )

    if do:
        switched = switch_branch(gitobj, branch)
        if not switched:  # pragma: no cover
            print_and_exit("Cannot switch repo {} to version {}", local_path, branch)

    if check:
        compare_repository(gitobj, branch, online_url=url)

    return gitobj
def restore(
    container: Optional[Tuple[str, str]], backup_file: str, force: bool
) -> None:
    """Restore a RabbitMQ mnesia archive; stops the service while restoring."""
    if container and not force:
        print_and_exit(
            "RabbitMQ is running and the restore will temporary stop it. "
            "If you want to continue add --force flag"
        )

    docker = Docker()

    if container:
        docker.remove(SERVICE_NAME)

    backup_path = f"/backup/{SERVICE_NAME}/{backup_file}"

    log.info("Starting restore on {}...", SERVICE_NAME)

    # Unpack the archive straight into rabbitmq's data folder
    docker.compose.create_volatile_container(
        SERVICE_NAME, command=f"tar -xf {backup_path} -C /var/lib/rabbitmq/"
    )

    log.info("Restore from data{} completed", backup_path)

    if container:
        docker.start(SERVICE_NAME)
def login(self) -> None:
    """Log the docker client into the local registry.

    On an untrusted-TLS failure, print the daemon.json snippet the user
    must add; any other DockerException is re-raised.
    """
    registry = self.get_host()
    try:
        self.docker.login(
            server=registry,
            username=cast(str, Application.env["REGISTRY_USERNAME"]),
            password=cast(str, Application.env["REGISTRY_PASSWORD"]),
        )
    except DockerException as e:
        if "docker login --username" in str(e):
            settings = f"""
{{
  "insecure-registries" : ["{registry}"]
}}
"""
            print_and_exit(
                "Your registry TLS certificate is untrusted.\n\nYou should add the "
                "following setting into your /etc/docker/daemon.json\n{}\n"
                "and then restart the docker daemon\n",
                settings,
            )

        raise e
def fetch(path: str, gitobj: Repo) -> None:
    """Fetch the 'origin' remote of the given repo.

    NOTE(review): the path parameter is not used by this body — presumably
    kept for signature symmetry with sibling git helpers; confirm callers.
    """
    for remote in gitobj.remotes:
        if remote.name != "origin":
            continue
        try:
            remote.fetch()
        except GitCommandError as e:  # pragma: no cover
            print_and_exit(str(e))
def check_internet_connection() -> None:
    """Check if connected to internet

    Performs a probe request against google.com with a short timeout and
    exits with a clean message when the network is unavailable.
    """
    try:
        requests.get("https://www.google.com", timeout=2)
        if Configuration.check:
            log.info("Internet connection is available")
    # requests.Timeout (e.g. ReadTimeout) is NOT a subclass of
    # requests.ConnectionError: without catching it too, a slow network
    # crashed the command with a raw traceback instead of this clean exit
    except (requests.ConnectionError, requests.Timeout):  # pragma: no cover
        print_and_exit("Internet connection is unavailable")
def backup(
    container: Optional[Tuple[str, str]], now: datetime, force: bool, dry_run: bool
) -> None:
    """Back up MariaDB with mariabackup into /backup/<service>/<now>.tar.gz.

    The backup is created and prepared under /tmp as the mysql user, then
    compressed, verified and moved into /backup as root.
    """
    if not container:
        print_and_exit(
            "The backup procedure requires {} running, please start your stack",
            SERVICE_NAME,
        )

    docker = Docker()

    log.info("Starting backup on {}...", SERVICE_NAME)

    tmp_backup_path = f"/tmp/{now}"
    backup_path = f"/backup/{SERVICE_NAME}/{now}.tar.gz"

    if not dry_run:
        # Creating backup on a tmp folder as mysql user
        docker.exec_command(
            container,
            user="******",
            command=(
                f"sh -c 'mariabackup --backup --target-dir={tmp_backup_path} "
                '-uroot -p"$MYSQL_ROOT_PASSWORD"\''
            ),
        )

        # Prepare (apply the logs to) the raw backup, still as mysql user
        log.info("Executing mariabackup...")
        docker.exec_command(
            container,
            user="******",
            command=f"sh -c 'mariabackup --prepare --target-dir={tmp_backup_path}'",
        )

        # Compress the prepared data folder. Used -C to skip the /tmp from folders paths
        log.info("Compressing the backup file...")
        docker.exec_command(
            container,
            user="******",
            command=f"tar -zcf {tmp_backup_path}.tar.gz -C /tmp {now}",
        )

        # Verify the gz integrity
        log.info("Verifying the integrity of the backup file...")
        docker.exec_command(
            container, user="******", command=f"gzip -t {tmp_backup_path}.tar.gz"
        )

        # Move the backup from /tmp to /backup (as root user)
        docker.exec_command(
            container,
            user="******",
            command=f"mv {tmp_backup_path}.tar.gz {backup_path}",
        )

    log.info("Backup completed: data{}", backup_path)
def check_redis_password(pwd: Optional[EnvType]) -> None:
    """Exit when REDIS_PASSWORD contains characters some clients can't handle."""
    if not pwd:
        return

    invalid_characters = ["#"]
    if any(c in str(pwd) for c in invalid_characters):
        log.critical("Not allowed characters found in REDIS_PASSWORD.")
        print_and_exit(
            "Some special characters, including {}, are not allowed "
            "because make some clients to fail to connect",
            " ".join(invalid_characters),
        )
def __init__(self, docker: Docker, check_initialization: bool = True):
    """Wrap a Docker client for swarm operations.

    When check_initialization is True, exit unless the swarm has a token.
    """
    self.docker_wrapper = docker
    self.docker = docker.client

    if check_initialization and not self.get_token():
        print_and_exit(
            "Swarm is not initialized, please execute {command}",
            command=RED("rapydo init"),
        )
def check_rabbit_password(pwd: Optional[EnvType]) -> None:
    """Exit when RABBITMQ_PASSWORD contains characters that break RabbitMQ."""
    if not pwd:
        return

    invalid_characters = ["£", "§", "”", "’"]
    if any(c in str(pwd) for c in invalid_characters):
        log.critical("Not allowed characters found in RABBITMQ_PASSWORD.")
        print_and_exit(
            "Some special characters, including {}, are not allowed "
            "because make RabbitMQ crash at startup",
            " ".join(invalid_characters),
        )
def save_template(self, filename: Path, content: str, force: bool = False) -> None:
    """Write rendered template content to filename.

    With force=True an existing file is backed up first and then
    overwritten; otherwise an existing file aborts with an error.
    """
    if filename.exists():
        if force:
            self.make_backup(filename)
        # It is always verified before calling save_template from app, create & add
        else:  # pragma: no cover
            print_and_exit("File {} already exists", filename)

    # "w" truncates/creates as before; the original "w+" read capability was
    # never used. Explicit utf-8 avoids locale-dependent output encodings.
    with open(filename, "w", encoding="utf-8") as fh:
        fh.write(content)
def get_template(self, filename: str, data: TemplateDataType) -> str:
    """Render the named template with data; exit when it cannot be found."""
    try:
        template = self.env.get_template(self.get_template_name(filename))
        return str(template.render(**data))
    except TemplateNotFound as e:
        print_and_exit("Template {} not found in: {}", str(e), self.template_dir)
    except UndefinedError as e:  # pragma: no cover
        print_and_exit(str(e))
def read_specs(self, read_extended: bool = True) -> None:
    """Read project configuration

    Loads and validates the core + custom configuration, mixes in the
    host configuration, and populates the Configuration class attributes
    (frontend, title, version, rapydo version, description, keywords).
    Exits on validation errors or when the rapydo version is missing.
    """
    try:
        confs = configuration.read_configuration(
            default_file_path=CONFS_DIR,
            base_project_path=Configuration.ABS_PROJECT_PATH,
            projects_path=PROJECT_DIR,
            submodules_path=SUBMODULES_DIR,
            read_extended=read_extended,
            production=Configuration.production,
        )

        # confs 3 is the core config, extra fields are allowed
        configuration.validate_configuration(confs[3], core=True)
        # confs 0 is the merged conf core + custom, extra fields are allowed
        configuration.validate_configuration(confs[0], core=False)
        log.info("Project configuration is valid")

        # Layer the host configuration on top of the merged project config
        Configuration.specs = configuration.mix_configuration(
            confs[0], Configuration.host_configuration
        )
        configuration.validate_configuration(Configuration.specs, core=False)
        log.info("Host configuration is valid")
        # confs 1/2 are the extended project name and its path (if any)
        self.extended_project = confs[1]
        self.extended_project_path = confs[2]
    except AttributeError as e:  # pragma: no cover
        print_and_exit(str(e))

    Configuration.frontend = cast(
        str,
        (
            Configuration.specs.get("variables", {})
            .get("env", {})
            .get("FRONTEND_FRAMEWORK", NO_FRONTEND)
        ),
    )

    # Normalize "no frontend" to None so truthiness checks work downstream
    if Configuration.frontend == NO_FRONTEND:
        Configuration.frontend = None

    project = Configuration.specs.get("project", {})
    Configuration.project_title = project.get("title", "Unknown title")
    Configuration.version = project.get("version", "")
    Configuration.rapydo_version = project.get("rapydo", "")
    Configuration.project_description = project.get(
        "description", "Unknown description"
    )
    Configuration.project_keywords = project.get("keywords", "")

    if not Configuration.rapydo_version:  # pragma: no cover
        print_and_exit(
            "RAPyDo version not found in your project_configuration file"
        )

    # The configured value may be a number in yaml; force it to a string
    Configuration.rapydo_version = str(Configuration.rapydo_version)
def verify_image(self, image: str) -> bool:
    """Return True when image:tag has a manifest in the local registry."""
    # The local registry uses a self-signed certificate
    urllib3.disable_warnings(  # type: ignore
        urllib3.exceptions.InsecureRequestWarning
    )

    host = f"https://{self.get_host()}"
    repository, tag = image.split(":")

    r = self.send_request(
        f"{host}/v2/{repository}/manifests/{tag}", check_status=False
    )

    if r.status_code == 401:  # pragma: no cover
        print_and_exit("Access denied to {} registry", host)

    return r.status_code == 200
def parse_dockerhub(lib: str, sleep_time: int) -> str:
    """Return the latest relevant tag for an image on Docker Hub."""
    # Images pinned to a fixed tag
    if lib == "stilliard/pure-ftpd":
        return "stretch-latest"
    if lib == "docker":
        return "dind"

    # Throttle requests to stay within Docker Hub rate limits
    time.sleep(sleep_time)

    # Official images live under the "library" namespace
    if "/" not in lib:
        lib = f"library/{lib}"

    AUTH_URL = "https://auth.docker.io"
    REGISTRY_URL = "https://registry.hub.docker.com"
    AUTH_SCOPE = f"repository:{lib}:pull"

    token_url = f"{AUTH_URL}/token?service=registry.docker.io&scope={AUTH_SCOPE}"
    token = requests.get(token_url, timeout=30).json()["token"]
    if not token:
        print_and_exit("Invalid docker hub token")

    headers = {"Authorization": f"Bearer {token}"}
    tags_url = f"{REGISTRY_URL}/v2/{lib}/tags/list"
    tags = requests.get(tags_url, headers=headers, timeout=30).json().get("tags")

    # Per-image rules about which tag flavor to track
    if lib == "library/node":
        return get_latest_version(tags, suffix="-buster")
    if lib == "library/rabbitmq":
        return get_latest_version(tags, suffix="-management")
    if lib == "library/ubuntu":
        return get_latest_version(tags, regexp=SEMVER2, ignores=["21.10"])
    if lib == "library/postgres":
        return get_latest_version(tags, regexp=SEMVER2, suffix="-alpine")
    if lib == "library/nginx":
        return get_latest_version(tags, suffix="-alpine")
    if lib == "swaggerapi/swagger-ui":
        return get_latest_version(tags, prefix="v")

    return get_latest_version(tags)