def main() -> None:
    # All imports are moved here to avoid slowing down the import of this module
    import warnings

    from controller import TESTING, log, print_and_exit

    if TESTING:
        warnings.filterwarnings("error")
    else:  # pragma: no cover
        warnings.filterwarnings("default")

    from colorama import deinit, init  # type: ignore
    from python_on_whales.utils import DockerException

    from controller.app import Application

    try:
        init()
        Application.load_projectrc()
        controller = Application()
        controller.app()
    except DockerException as e:  # pragma: no cover
        log.critical("Uncaught exception: {}", type(e))
        print_and_exit(str(e))

    deinit()
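
# A minimal sketch (illustrative only, not part of the controller) of the
# effect of warnings.filterwarnings("error") used above: under test, warnings
# are promoted to exceptions so they fail loudly instead of being hidden.
def _demo_warnings_as_errors() -> None:
    import warnings

    with warnings.catch_warnings():
        warnings.filterwarnings("error")
        try:
            warnings.warn("deprecated behavior", DeprecationWarning)
        except DeprecationWarning as exc:
            # under the "error" filter the warning surfaces as a raised exception
            print(f"caught as exception: {exc}")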
def can_be_updated(path: str, gitobj: Repo, do_print: bool = True) -> bool:
    unstaged = get_unstaged_files(gitobj)
    updatable = len(unstaged["changed"]) == 0 and len(unstaged["untracked"]) == 0

    if not updatable and do_print:
        log.critical("Unable to update {} repo, you have unstaged files", path)
        print_diff(gitobj, unstaged)

    return updatable
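
# Hedged sketch of the underlying check can_be_updated() relies on, written
# against the GitPython API directly; get_unstaged_files and print_diff are
# project helpers, so this standalone version only approximates their behavior.
def _demo_unstaged_files(path: str) -> bool:
    from git import Repo

    repo = Repo(path)
    # diff(None) compares the index against the working tree (unstaged edits)
    changed = [d.a_path for d in repo.index.diff(None)]
    untracked = repo.untracked_files
    return not changed and not untracked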
def check_redis_password(pwd: Optional[EnvType]) -> None:
    if pwd:
        invalid_characters = ["#"]
        if any(c in str(pwd) for c in invalid_characters):
            log.critical("Invalid characters found in REDIS_PASSWORD.")
            print_and_exit(
                "Some special characters, including {}, are not allowed "
                "because they make some clients fail to connect",
                " ".join(invalid_characters),
            )
def check_rabbit_password(pwd: Optional[EnvType]) -> None:
    if pwd:
        invalid_characters = ["£", "§", "”", "’"]
        if any(c in str(pwd) for c in invalid_characters):
            log.critical("Invalid characters found in RABBITMQ_PASSWORD.")
            print_and_exit(
                "Some special characters, including {}, are not allowed "
                "because they make RabbitMQ crash at startup",
                " ".join(invalid_characters),
            )
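
# Why characters like these break clients (applies to both checks above): many
# are reserved in connection URIs, e.g. "#" starts a fragment in
# redis://user:pwd@host, so an unencoded password truncates the URL. A minimal
# stdlib sketch of the problem and the percent-encoding workaround; the URL
# below is illustrative only.
def _demo_reserved_password_characters() -> None:
    from urllib.parse import quote, urlparse

    pwd = "se#ret"
    parsed = urlparse(f"redis://user:{pwd}@localhost:6379/0")
    print(parsed.password)  # 'se' -> everything after '#' became the fragment

    parsed = urlparse(f"redis://user:{quote(pwd, safe='')}@localhost:6379/0")
    print(parsed.password)  # 'se%23ret' -> survives parsing intact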
def info(self):
    infos = "\n"
    base_endpoint = False
    endpoint = self.endpoint_name

    # look inside the extended swagger definition
    backend = self.backend_dir
    needle = self.find_swagger(endpoint, backend)

    # or look inside the base swagger definition of rapydo
    if needle is None:
        backend = self.base_backend_dir
        needle = self.find_swagger(endpoint, backend)
        base_endpoint = True
        python_file_dir = Path(backend, "resources")
    else:
        python_file_dir = Path(backend, ENDPOINTS_CODE_DIR)

    if needle is None:
        log.exit('No endpoint "{}" found in the current swagger definition', endpoint)

    current_dir = Path.cwd()
    uri = Path(needle.get("baseuri", "/api"), endpoint)
    infos += "Endpoint path:\t{}\n".format(uri)

    swagger_dir = Path(current_dir, backend, SWAGGER_DIR, needle.get("swagger"))
    infos += "Swagger path:\t{}/\n".format(swagger_dir)

    infos += "Labels:\t\t{}\n".format(", ".join(needle.get("labels")))

    python_file_path = Path(current_dir, python_file_dir, needle.get("file") + ".py")
    infos += "Python file:\t{}\n".format(python_file_path)

    python_class = needle.get("class")
    infos += "Python class:\t{}\n".format(python_class)

    log.info("Information about '{}':\n{}", endpoint, infos)

    if base_endpoint:
        log.warning(
            "This is a BASE endpoint of the RAPyDo framework.\n"
            "Do not modify it unless you are a RAPyDo developer."
        )

    with open(str(python_file_path)) as fh:
        content = fh.read()
        clstest = "class {}(".format(python_class)
        if clstest not in content:
            log.critical("Class '{}' definition not found in python file", python_class)
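
# Hedged sketch of the dictionary shape that find_swagger() is assumed to
# return, inferred from the .get() calls above; keys and values here are
# purely illustrative, not taken from a real project.
EXAMPLE_NEEDLE = {
    "baseuri": "/api",        # endpoint mounted under this prefix (defaults to /api)
    "swagger": "myendpoint",  # subfolder of SWAGGER_DIR holding the YAML spec
    "labels": ["custom"],     # swagger tags shown in the API docs
    "file": "myendpoint",     # python module name, without the .py suffix
    "class": "MyEndpoint",    # class expected to be defined in that module
}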
def check_resources(self) -> None:
    total_cpus = 0.0
    total_memory = 0.0
    for service in Application.data.active_services:
        config = Application.data.compose_config[service]

        # the frontend container has no deploy options
        if not config.deploy:
            continue

        if config.deploy.resources.reservations:
            # explicit casts are needed because python on whales 0.25 widened
            # the types of cpus and replicas to Union[float, str] to follow
            # the compose-cli typing
            cpus = float(config.deploy.resources.reservations.cpus or 0)
            memory = config.deploy.resources.reservations.memory or 0

            # the proxy container is now defined as global and without any
            # replicas => replicas is None => defaulted to 1
            replicas = int(config.deploy.replicas or 1)

            total_cpus += replicas * cpus
            total_memory += replicas * memory

    nodes_cpus = 0.0
    nodes_memory = 0.0
    for node in self.docker.node.list():
        nodes_cpus += round(node.description.resources.nano_cpus / 1_000_000_000)
        nodes_memory += node.description.resources.memory_bytes

    if total_cpus > nodes_cpus:
        log.critical(
            "Your deployment requires {} cpus but your nodes only have {}",
            total_cpus,
            nodes_cpus,
        )

    if total_memory > nodes_memory:
        log.critical(
            "Your deployment requires {} of RAM but your nodes only have {}",
            system.bytes_to_str(total_memory),
            system.bytes_to_str(nodes_memory),
        )
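
# Worked example of the arithmetic above (values are made up): Docker reports
# node CPUs in nano-cpus, so 4 CPUs arrive as 4_000_000_000, and a service
# reserving 0.5 CPUs and 256MB with 3 replicas needs 1.5 CPUs and 768MB.
def _demo_resource_math() -> None:
    nano_cpus = 4_000_000_000
    node_cpus = round(nano_cpus / 1_000_000_000)  # -> 4

    replicas = 3
    reserved_cpus = 0.5
    reserved_memory = 256 * 1024 * 1024
    total_cpus = replicas * reserved_cpus      # -> 1.5
    total_memory = replicas * reserved_memory  # -> 768MB, in bytes

    assert total_cpus <= node_cpus
    print(total_cpus, total_memory)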
def update(path, gitobj):
    unstaged = get_unstaged_files(gitobj)
    changed = len(unstaged["changed"]) > 0
    untracked = len(unstaged["untracked"]) > 0

    if changed or untracked:
        log.critical("Unable to update {} repo, you have unstaged files", path)
        print_diff(gitobj, unstaged)
        sys.exit(1)

    for remote in gitobj.remotes:
        if remote.name == "origin":
            try:
                branch = gitobj.active_branch
                log.info("Updating {} {} (branch {})", remote, path, branch)
                remote.pull(branch)
            except GitCommandError as e:
                log.error("Unable to update {} repo\n{}", path, e)
            except TypeError as e:
                if TESTING:
                    log.warning("Unable to update {} repo, {}", path, e)
                else:
                    log.exit("Unable to update {} repo, {}", path, e)
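
# Hedged note on the TypeError branch above: GitPython raises TypeError from
# Repo.active_branch when HEAD is detached (e.g. a repository checked out at a
# tag or commit), which is the situation this handler is assumed to cover.
def _demo_detached_head(path: str) -> None:
    from git import Repo

    repo = Repo(path)
    try:
        print(repo.active_branch)
    except TypeError:
        # detached HEAD: there is no active branch to pull
        print(f"{path} is in detached HEAD state at {repo.head.commit.hexsha}")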
def check(
    no_git: bool = typer.Option(
        False,
        "--no-git",
        "-s",
        help="Skip checks on git commits",
        show_default=False,
    ),
    no_builds: bool = typer.Option(
        False,
        "--no-builds",
        help="Skip checks on docker builds",
        show_default=False,
    ),
    ignore_submodules: List[str] = typer.Option(
        [],
        "--ignore-submodule",
        "-i",
        help="Ignore submodule",
        show_default=False,
        shell_complete=Application.autocomplete_submodule,
    ),
) -> None:
    Application.print_command(
        Application.serialize_parameter("--no-git", no_git, IF=no_git),
        Application.serialize_parameter("--no-builds", no_builds, IF=no_builds),
        Application.serialize_parameter("--ignore-submodule", ignore_submodules),
    )

    Application.get_controller().controller_init()

    docker = Docker()
    if Configuration.swarm_mode:
        log.debug("Swarm is correctly initialized")
        docker.swarm.check_resources()

    if no_git:
        log.info("Skipping git checks")
    else:
        log.info("Checking git (skip with --no-git)")
        Application.git_checks(ignore_submodules)

    if no_builds:
        log.info("Skipping build checks")
    else:
        log.info("Checking builds (skip with --no-builds)")

        dimages: List[str] = []
        for img in docker.client.images():
            if img.repo_tags:
                for i in img.repo_tags:
                    dimages.append(i)

        all_builds = find_templates_build(Application.data.compose_config)
        core_builds = find_templates_build(Application.data.base_services)
        overriding_builds = find_templates_override(
            Application.data.compose_config, core_builds
        )

        for image_tag, build in all_builds.items():
            services = build["services"]
            if not any(x in Application.data.active_services for x in services):
                continue

            if image_tag not in dimages:
                if image_tag in core_builds:
                    log.warning(
                        "Missing {} image, execute {command}",
                        image_tag,
                        command=RED("rapydo pull"),
                    )
                else:
                    log.warning(
                        "Missing {} image, execute {command}",
                        image_tag,
                        command=RED("rapydo build"),
                    )
                continue

            image_creation = get_image_creation(image_tag)
            # Check if some recent commit modified the Dockerfile
            d1, d2 = build_is_obsolete(image_creation, build.get("path"))
            if d1 and d2:
                tmp_from_image = overriding_builds.get(image_tag)
                # This is the case of a build not overriding a core image,
                # e.g. nifi or geoserver. In that case from_image is faked to
                # image_tag, just to make print_obsolete suggest 'build'
                # instead of 'pull'
                if not tmp_from_image and image_tag not in core_builds:
                    tmp_from_image = image_tag

                print_obsolete(image_tag, d1, d2, build.get("service"), tmp_from_image)

            # if the FROM image is newer, this build should be rebuilt
            elif image_tag in overriding_builds:
                from_img = overriding_builds.get(image_tag, "")
                from_build: Optional[TemplateInfo] = core_builds.get(from_img)

                if not from_build:  # pragma: no cover
                    log.critical("Malformed {} image, from build is missing", image_tag)
                    continue

                # Verify if the template build exists
                if from_img not in dimages:  # pragma: no cover
                    log.warning(
                        "Missing template build for {} ({})",
                        from_build.get("services"),
                        from_img,
                    )

                from_timestamp = get_image_creation(from_img)
                # Verify whether the template build is obsolete or not
                d1, d2 = build_is_obsolete(from_timestamp, from_build.get("path"))
                if d1 and d2:  # pragma: no cover
                    print_obsolete(from_img, d1, d2, from_build.get("service"))

                if from_timestamp > image_creation:
                    b = image_creation.strftime(DATE_FORMAT)
                    c = from_timestamp.strftime(DATE_FORMAT)
                    print_obsolete(image_tag, b, c, build.get("service"), from_img)

    templating = Templating()
    for filename in Application.project_scaffold.fixed_files:
        if templating.file_changed(str(filename)):
            log.warning(
                "{} changed, please execute {command}",
                filename,
                command=RED(f"rapydo upgrade --path {filename}"),
            )

    compose_version = "Unknown"
    buildx_version = "Unknown"

    m = re.search(
        r"^Docker Compose version (v[0-9]+\.[0-9]+\.[0-9]+)$",
        docker.client.compose.version(),
    )
    if m:
        compose_version = m.group(1)

    m = re.search(
        r"^github.com/docker/buildx (v[0-9]+\.[0-9]+\.[0-9]+) .*$",
        docker.client.buildx.version(),
    )
    if m:
        buildx_version = m.group(1)

    if compose_version == COMPOSE_VERSION:
        log.info("Compose is installed with version {}", COMPOSE_VERSION)
    else:  # pragma: no cover
        cmd = RED("rapydo install compose")
        fix_hint = f"You can update it with {cmd}"
        log.warning(
            "Compose is installed with version {}, expected version is {}.\n{}",
            compose_version,
            COMPOSE_VERSION,
            fix_hint,
        )

    if buildx_version == BUILDX_VERSION:
        log.info("Buildx is installed with version {}", BUILDX_VERSION)
    else:  # pragma: no cover
        cmd = RED("rapydo install buildx")
        fix_hint = f"You can update it with {cmd}"
        log.warning(
            "Buildx is installed with version {}, expected version is {}.\n{}",
            buildx_version,
            BUILDX_VERSION,
            fix_hint,
        )

    for expired_password in get_expired_passwords():
        log.warning(
            "{} is expired on {}",
            expired_password[0],
            expired_password[1].strftime("%Y-%m-%d"),
        )

    log.info("Checks completed")
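
# Standalone sketch of the version parsing used above; the sample strings
# mimic the output formats the two regexes expect from `docker compose
# version` and `docker buildx version` and are illustrative, not authoritative.
def _demo_version_parsing() -> None:
    import re

    compose_output = "Docker Compose version v2.20.2"
    m = re.search(
        r"^Docker Compose version (v[0-9]+\.[0-9]+\.[0-9]+)$", compose_output
    )
    print(m.group(1) if m else "Unknown")  # -> v2.20.2

    buildx_output = "github.com/docker/buildx v0.11.2 9872040"
    m = re.search(
        r"^github.com/docker/buildx (v[0-9]+\.[0-9]+\.[0-9]+) .*$", buildx_output
    )
    print(m.group(1) if m else "Unknown")  # -> v0.11.2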
def create_volatile_container(
    self,
    service: str,
    command: Optional[str] = None,
    # publish is used by interfaces
    publish: Optional[List[Union[PortMapping, PortRangeMapping]]] = None,
    detach: bool = False,
    user: Optional[str] = None,
) -> bool:
    compose_engine_forced = False
    if Configuration.swarm_mode:
        # import here to prevent circular imports
        from controller.app import Application

        if not Configuration.FORCE_COMPOSE_ENGINE:
            compose_engine_forced = True
            Configuration.FORCE_COMPOSE_ENGINE = True
            # init is needed to reload the configuration to force the compose engine
            Application.get_controller().controller_init()

    tty = sys.stdout.isatty()

    try:
        output = self.docker.compose.run(
            service=service,
            name=service,
            command=Docker.split_command(command),
            user=user,
            detach=detach,
            tty=tty and not detach,
            stream=not tty and not detach,
            dependencies=False,
            remove=True,
            service_ports=False,
            publish=publish or [],
            use_aliases=True,
        )

        if not detach:
            for out_line in output:  # type: ignore
                # each out_line is a tuple of origin ('stdout' or 'stderr')
                # and payload; both out and err are collapsed on stdout.
                # Maybe in the future it would be useful to keep them separated?
                # stdstream = out_line[0]
                line = out_line[1]

                if isinstance(line, bytes):
                    line = line.decode("UTF-8")

                print(line.strip())

        if compose_engine_forced:
            Configuration.FORCE_COMPOSE_ENGINE = False
            # init is needed to reload the configuration to undo the compose engine
            Application.get_controller().controller_init()

        return True
    except DockerException as e:
        log.critical(e)
        return False
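
# Hedged sketch of the (origin, payload) tuples that python_on_whales yields
# when stream=True, which the loop above consumes; the generator here is a
# stub standing in for a real `docker compose run` stream.
def _demo_stream_consumption() -> None:
    from typing import Iterator, Tuple

    def fake_stream() -> Iterator[Tuple[str, bytes]]:
        yield ("stdout", b"starting service\n")
        yield ("stderr", b"a warning line\n")

    for origin, payload in fake_stream():
        line = payload.decode("UTF-8") if isinstance(payload, bytes) else payload
        print(f"[{origin}] {line.strip()}")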
def check_placeholders_and_passwords(
    compose_services: ComposeServices, active_services: List[str]
) -> None:
    if not active_services:  # pragma: no cover
        print_and_exit(
            "You have no active service\n"
            "Suggestion: to activate a top-level service, edit your "
            "project_configuration and add the variable "
            '"ACTIVATE_DESIREDSERVICE: 1"'
        )
    elif Configuration.check:
        log.info("Active services: {}", ", ".join(active_services), log_to_file=True)

    extra_services: List[str] = []
    if Configuration.swarm_mode and REGISTRY not in active_services:
        extra_services.append(REGISTRY)

    all_services = active_services + extra_services

    missing: Dict[str, Set[str]] = {}
    passwords: Dict[str, str] = {}
    passwords_services: Dict[str, Set[str]] = {}
    for service_name in all_services:
        # This can happen with `rapydo run swagger` because in case of run
        # the controller_init method is executed without passing the service.
        # This is because interfaces are not enabled on the base stack and
        # controller_init([service]) would fail.
        # As a side effect, non-existing services are not blocked
        if service_name not in compose_services:
            continue

        service = compose_services[service_name]
        if service:
            for key, value in service.environment.items():
                if str(value) == PLACEHOLDER:
                    key = services.normalize_placeholder_variable(key)
                    missing.setdefault(key, set())
                    missing[key].add(service_name)
                elif key.endswith("_PASSWORD") and value:
                    key = services.normalize_placeholder_variable(key)
                    passwords.setdefault(key, value)
                    passwords_services.setdefault(key, set())
                    passwords_services[key].add(service_name)

    placeholders = []
    for variable, raw_services in missing.items():
        serv = services.vars_to_services_mapping.get(variable) or raw_services
        active_serv = [s for s in serv if s in all_services]
        if active_serv:
            placeholders.append([variable, ", ".join(active_serv)])

    MIN_PASSWORD_SCORE = int(
        Application.env.get("MIN_PASSWORD_SCORE", 2)  # type: ignore
    )
    for variable, raw_services in passwords_services.items():
        serv = services.vars_to_services_mapping.get(variable) or raw_services
        active_serv = [s for s in serv if s in all_services]
        if active_serv:
            password = passwords.get(variable)
            result = zxcvbn(password)
            score = result["score"]
            if score < MIN_PASSWORD_SCORE:
                if score == MIN_PASSWORD_SCORE - 1:
                    log.warning("The password used in {} is weak", variable)
                elif score == MIN_PASSWORD_SCORE - 2:
                    log.error("The password used in {} is very weak", variable)
                else:
                    log.critical("The password used in {} is extremely weak", variable)

    if placeholders:
        log.critical("The following variables are missing in your configuration:")
        print("")
        print(
            tabulate(
                placeholders,
                tablefmt=TABLE_FORMAT,
                headers=["VARIABLE", "SERVICE(S)"],
            )
        )
        print("")
        log.info("You can fix this error by updating your .projectrc file")
        sys.exit(1)
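
# Minimal demo of the zxcvbn scoring used above (the zxcvbn library scores
# passwords from 0, weakest, to 4, strongest); with the default
# MIN_PASSWORD_SCORE of 2, a score of 1 triggers the warning branch, a score
# of 0 the error/critical branches.
def _demo_password_score() -> None:
    from zxcvbn import zxcvbn

    for pwd in ("password", "correct horse battery staple"):
        result = zxcvbn(pwd)
        print(pwd, "->", result["score"])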