def check_installed_software() -> None:
    """Verify that the container tooling required by rapydo is available."""
    log.debug(
        "python version: {}.{}.{}",
        sys.version_info.major,
        sys.version_info.minor,
        sys.version_info.micro,
    )

    # 17.05 added support for multi-stage builds
    # https://docs.docker.com/compose/compose-file/compose-file-v3/#compose-and-docker-compatibility-matrix
    # 18.09.2 fixed the CVE-2019-5736 vulnerability
    # 20.10.0 introduced copy --chmod and improved logging
    Packages.check_program(
        "docker", min_version="20.10.0", min_recommended_version="20.10.0"
    )

    # Guard clause: abort immediately when compose v2 is missing
    if not docker.compose.is_installed():  # pragma: no cover
        print_and_exit(
            "A mandatory dependency is missing: docker compose not found"
            "\nInstallation guide: "
            "https://docs.docker.com/compose/cli-command/#installing-compose-v2"
            "\nor try the automated installation with {command}",
            command=RED("rapydo install compose"),
        )

    # too slow to verify the version on every commands... near half a seconds
    # Sometimes a couple of seconds!
    # v = docker.compose.version()
    # log.debug("docker compose is installed: {}", v)
    log.debug("docker compose is installed")
def install_controller_from_git(version: str) -> None:
    """Install the requested rapydo controller version via pip from GitHub."""
    log.info("You asked to install rapydo {} from git", version)

    # pip-installable VCS URL pinned to the requested tag/branch
    git_url = f"git+https://github.com/rapydo/do.git@{version}"
    Packages.install(git_url, editable=False)

    log.info("Controller version {} installed from git", version)
def test_execute_command() -> None:
    """Packages.execute_command returns stdout and raises on failing commands."""
    # -n suppresses echo's trailing newline
    assert Packages.execute_command("echo", ["-n", "Hello World"]) == "Hello World"
    assert Packages.execute_command("echo", ["Hello World"]) == "Hello World\n"

    # A non-zero exit status is surfaced as ExecutionException
    with pytest.raises(ExecutionException):
        assert Packages.execute_command("ls", ["doesnotexistforsure"])
def test_download() -> None:
    """Packages.download rejects bad URLs/checksums and accepts a valid pair."""
    archive_url = "https://github.com/rapydo/do/archive/refs/tags/v1.2.zip"

    # Invalid URL -> SystemExit
    with pytest.raises(SystemExit):
        Packages.download("https://www.google.com/test", "")

    # Valid URL but wrong checksum -> SystemExit
    with pytest.raises(SystemExit):
        Packages.download(archive_url, "thisisawrongchecksum")

    # Valid URL with the matching MD5 checksum succeeds
    assert Packages.download(archive_url, "dc07bef0d12a7a9cfd0f383452cbcb6d") is not None
def install_controller_from_folder(version: str) -> None:
    """Install the controller in editable mode from the local 'do' submodule.

    Requires git submodules to be initialized; switches the submodule to the
    requested version (branch) before running the editable install.

    :param version: branch/version of rapydo/do to check out and install
    :raises SystemExit: if submodules are missing or the version is invalid
    """
    do_path = SUBMODULES_DIR.joinpath("do")

    try:
        Application.git_submodules()
    except SystemExit:
        # Submodules are missing: suggest the non-editable fallback, then re-raise
        log.info(
            """You asked to install rapydo {ver} in editable mode, but {p} is missing. You can force the installation by disabling the editable mode: rapydo install {ver} --no-editable """,
            ver=version,
            p=do_path,
        )
        raise

    log.info(
        "You asked to install rapydo {}. It will be installed in editable mode",
        version,
    )

    do_repo = Application.gits.get("do")

    # Active branch of the local submodule (None when unreadable/detached)
    b = git.get_active_branch(do_repo)

    if b is None:
        log.error("Unable to read local controller repository")  # pragma: no cover
    elif b == version:
        log.info("Controller repository already at {}", version)
    elif git.switch_branch(do_repo, version):
        log.info("Controller repository switched to {}", version)
    else:
        # switch_branch failed: the requested version does not exist
        print_and_exit("Invalid version")

    Packages.install(do_path, editable=True)
    log.info("Controller version {} installed from local folder", version)
def git_update(ignore_submodule: List[str]) -> None:
    """Update all tracked git submodules, skipping those in ignore_submodule.

    Runs in two passes: first verifies that every repository can be updated
    (aborting before touching anything otherwise), then performs the updates.
    If the 'do' (controller) submodule was updated, also checks whether the
    installed rapydo package points at that folder and warns when it does not.
    """
    # Pass 1: fail fast if any repo cannot be updated
    for name, gitobj in Application.gits.items():
        if name in ignore_submodule:
            log.debug("Skipping update on {}", name)
            continue

        if gitobj and not git.can_be_updated(name, gitobj):
            print_and_exit("Can't continue with updates")

    # Pass 2: perform the updates
    controller_is_updated = False
    for name, gitobj in Application.gits.items():
        if name in ignore_submodule:
            continue

        if name == "do":
            controller_is_updated = True

        if gitobj:
            git.update(name, gitobj)

    if controller_is_updated:
        installation_path = Packages.get_installation_path("rapydo")

        # Can't be tested on GA since rapydo is always installed from a folder
        if not installation_path:  # pragma: no cover
            log.warning(
                "Controller is not installed in editable mode, "
                "rapydo is unable to update it"
            )
        elif Application.gits["do"].working_dir:
            do_dir = Path(Application.gits["do"].working_dir)
            # Resolve symlinks so the comparison below uses the real path
            if do_dir.is_symlink():
                do_dir = do_dir.resolve()
                # This can be used starting from py39
                # do_dir = do_dir.readlink()

            if do_dir == installation_path:
                log.info("Controller installed from {} and updated", installation_path)
            else:
                log.warning(
                    "Controller not updated because it is installed outside this "
                    "project. Installation path is {}, the current folder is {}",
                    installation_path,
                    do_dir,
                )
        else:  # pragma: no cover
            log.warning("Controller submodule folder can't be found")
def install(
    version: str = typer.Argument("auto", help="Version to be installed"),
    editable: bool = typer.Option(
        True,
        "--no-editable",
        help="Disable editable mode",
        show_default=False,
    ),
) -> None:
    """Install a rapydo controller version, or auxiliary docker tooling.

    The special versions "docker", "compose" and "buildx" install the
    corresponding tool instead of the controller; "auto" resolves to the
    version pinned by the project configuration.

    NOTE(review): `editable` is bound to the `--no-editable` CLI flag with a
    True default — confirm against typer that passing the flag yields
    editable=False as the else-branch below assumes.
    """
    Application.print_command(
        Application.serialize_parameter("--no-editable", not editable, IF=not editable),
        Application.serialize_parameter("", version),
    )

    # Auxiliary-tool targets: install and return without touching the controller
    if version == "docker":
        Packages.install_docker()
        return None

    if version == "compose":
        Packages.install_compose()
        return None

    if version == "buildx":
        Packages.install_buildx()
        return None

    Application.get_controller().controller_init()

    # "auto" means: use the version declared by the project configuration
    if version == "auto":
        version = Configuration.rapydo_version
        log.info("Detected version {} to be installed", version)

    if editable:
        install_controller_from_folder(version)
    else:
        install_controller_from_git(version)
def test(
    test: str = typer.Argument(None, help="Name of the test to be executed"),
    swarm_mode: bool = typer.Option(
        False,
        "--swarm",
        help="Execute the test in swarm mode",
        show_default=False,
    ),
    no_remove: bool = typer.Option(
        False,
        "--no-rm",
        help="Do not remove the container",
        show_default=False,
    ),
    # I have no need to test a command to locally execute tests
    # and I would like to prevent any recursive test execution!
) -> None:  # pragma: no cover
    """Run one controller test module inside a privileged controller container.

    Without a test name, lists the available test modules and returns.
    Otherwise pulls the controller image, starts a privileged container with
    the controller sources mounted at /code, and runs py.test in it.
    """
    Application.print_command(
        Application.serialize_parameter("--swarm", swarm_mode, IF=swarm_mode),
        Application.serialize_parameter("--no-rm", no_remove, IF=no_remove),
        Application.serialize_parameter("", test),
    )

    controller_path = Packages.get_installation_path("rapydo")

    # Can't really happen...
    if not controller_path:  # pragma: no cover
        print_and_exit("Controller path not found")

    if not test:
        # No test selected: print the available test modules and stop
        log.info("Choose a test to be executed:")
        for f in sorted(controller_path.joinpath("tests").glob("test_*.py")):
            test_name = f.with_suffix("").name.replace("test_", "")
            print(f" - {test_name}")
        return None

    test_file = Path("tests", f"test_{test}.py")
    if not controller_path.joinpath(test_file).exists():
        print_and_exit("Invalid test name {}", test)

    image_name = f"rapydo/controller:{__version__}"
    container_name = "controller"
    docker.image.pull(image_name)

    # Remove any stale container left over from a previous run
    if docker.container.exists(container_name):
        docker.container.remove(container_name, force=True, volumes=True)

    docker.container.run(
        image_name,
        detach=True,
        privileged=True,
        remove=True,
        volumes=[(controller_path, "/code")],
        name=container_name,
        envs={
            "TESTING": "1",
            "SWARM_MODE": "1" if swarm_mode else "0",
        },
    )

    # Start syslogd inside the container, detached from this process
    docker.container.execute(
        container_name,
        command="syslogd",
        interactive=False,
        tty=False,
        stream=False,
        detach=True,
    )

    # Wait few seconds to let the docker daemon to start
    log.info("Waiting for docker daemon to start...")
    time.sleep(3)

    command = ["py.test", "-s", "-x", f"/code/{test_file}"]
    log.info("Executing command: {}", " ".join(command))

    try:
        docker.container.execute(
            container_name,
            command=command,
            workdir="/tmp",
            interactive=True,
            tty=True,
            stream=False,
            detach=False,
        )
    except DockerException as e:
        # Log the failure but still fall through to the cleanup below
        log.error(e)

    # Do not remove the container to let for some debugging
    if not no_remove:
        docker.container.remove(container_name, force=True, volumes=True)
        log.info("Test container ({}) removed", container_name)
def test_packages(faker: Faker) -> None:
    """Exercise Packages helpers: versions, program checks, paths, win32 names."""
    # Unknown binaries have no detectable version
    assert Packages.get_bin_version("invalid") is None

    git_version = Packages.get_bin_version("git")
    assert git_version is not None
    # Something like 2.25.1
    assert len(str(Version(git_version)).split(".")) == 3

    # Check docker client version
    client_version = Packages.get_bin_version("docker")
    assert client_version is not None
    # Something like 19.03.8 or 18.06.0-ce
    assert len(str(Version(client_version)).split(".")) >= 3

    # Check docker engine version
    engine_version = Packages.get_bin_version(
        "docker", option=["version", "--format", "'{{.Server.Version}}'"]
    )
    assert engine_version is not None
    assert len(str(Version(engine_version)).split(".")) >= 3

    with pytest.raises(SystemExit):
        Packages.check_program("invalid")

    assert Packages.check_program("docker") is not None

    # Unsatisfiable version constraints make check_program exit
    with pytest.raises(SystemExit):
        Packages.check_program("docker", min_version="99999.99")
    with pytest.raises(SystemExit):
        Packages.check_program("docker", max_version="0.0")

    # Satisfiable constraints succeed
    assert Packages.check_program("docker", min_version="0.0") is not None
    assert Packages.check_program("docker", max_version="99999.99") is not None
    assert (
        Packages.check_program("docker", min_version="0.0", max_version="99999.99")
        is not None
    )
    # An unmet recommended version does not make check_program fail
    assert (
        Packages.check_program(
            "docker",
            min_version="0.0",
            max_version="99999.99",
            min_recommended_version="99999.99",
        )
        is not None
    )

    assert Packages.get_installation_path("invalid") is None
    assert Packages.get_installation_path("rapydo") is not None
    assert Packages.get_installation_path("pip") is None

    # Only docker gets the .exe suffix; every other name is returned unchanged
    for unchanged in ("test", "compose", "buildx", "git", faker.pystr()):
        assert Packages.convert_bin_to_win32(unchanged) == unchanged
    assert Packages.convert_bin_to_win32("docker") == "docker.exe"
def make_env(self) -> None:
    """Build Application.env from project configuration and write the .env file.

    Populates project paths, versions, credentials checks, deploy engine and
    service toggles, lets host environment variables override defaults, then
    validates the result and serializes it to COMPOSE_ENVIRONMENT_FILE.
    """
    # Start from a clean slate: drop any previously generated .env file
    try:
        COMPOSE_ENVIRONMENT_FILE.unlink()
    except FileNotFoundError:
        pass

    # Base environment comes from the project configuration specs
    Application.env = Configuration.specs.get("variables", {}).get("env", {})

    Application.env["PROJECT_DOMAIN"] = Configuration.hostname
    Application.env["COMPOSE_PROJECT_NAME"] = Configuration.project
    Application.env["DATA_DIR"] = str(DATA_DIR.resolve())
    Application.env["SUBMODULE_DIR"] = str(SUBMODULES_DIR.resolve())
    Application.env["PROJECT_DIR"] = str(
        PROJECT_DIR.joinpath(Configuration.project).resolve()
    )

    # Extended projects point BASE_PROJECT_DIR at the parent project
    if self.extended_project_path is None:
        Application.env["BASE_PROJECT_DIR"] = Application.env["PROJECT_DIR"]
    else:
        Application.env["BASE_PROJECT_DIR"] = str(
            self.extended_project_path.resolve()
        )

    if self.extended_project is None:
        Application.env["EXTENDED_PROJECT"] = EXTENDED_PROJECT_DISABLED
        Application.env["BASE_PROJECT"] = Application.env["COMPOSE_PROJECT_NAME"]
    else:
        Application.env["EXTENDED_PROJECT"] = str(self.extended_project)
        Application.env["BASE_PROJECT"] = Application.env["EXTENDED_PROJECT"]

    Application.env["RAPYDO_VERSION"] = __version__
    # BUILD identifies the current commit of the main repository
    Application.env["BUILD"] = git.get_last_commit(Application.gits["main"])
    Application.env["PROJECT_VERSION"] = Configuration.version
    Application.env["CURRENT_UID"] = str(self.current_uid)
    Application.env["CURRENT_GID"] = str(self.current_gid)
    Application.env["PROJECT_TITLE"] = (
        Configuration.project_title or "Unknown title"
    )
    Application.env["PROJECT_DESCRIPTION"] = (
        Configuration.project_description or "Unknown description"
    )
    Application.env["PROJECT_KEYWORDS"] = Configuration.project_keywords or ""

    roles_dict = Configuration.specs.get("variables", {}).get("roles", {})
    # Keep enabled roles only; "default" is metadata, not an actual role
    roles = ",".join(
        [k for k, v in roles_dict.items() if v != "disabled" and k != "default"]
    )
    # Leading/trailing commas simplify substring matching on the consumer side
    Application.env["AUTH_ROLES"] = f",{roles},"

    # Debug-friendly python settings, only outside production
    if Configuration.testing and not Configuration.production:
        Application.env["APP_MODE"] = "test"
        Application.env["PYTHONMALLOC"] = "debug"
        Application.env["PYTHONASYNCIODEBUG"] = "1"
        Application.env["PYTHONFAULTHANDLER"] = "1"

    Application.env["CELERYBEAT_SCHEDULER"] = services.get_celerybeat_scheduler(
        Application.env
    )

    if Configuration.load_frontend:
        if Configuration.frontend == ANGULAR:
            Application.env["ACTIVATE_ANGULAR"] = "1"

    # Reject weak/default credentials before going any further
    services.check_rabbit_password(Application.env.get("RABBITMQ_PASSWORD"))
    services.check_redis_password(Application.env.get("REDIS_PASSWORD"))

    # Host-shell environment variables override project defaults
    for e in Application.env:
        env_value = os.environ.get(e)
        if env_value is None:
            continue
        Application.env[e] = env_value

    # Explicit -e/--env overrides win over everything else
    Application.env.update(Configuration.environment)

    if Configuration.swarm_mode:
        if not Application.env.get("SWARM_MANAGER_ADDRESS"):
            Application.env["SWARM_MANAGER_ADDRESS"] = system.get_local_ip(
                Configuration.production
            )
        if not Application.env.get("REGISTRY_HOST"):
            Application.env["REGISTRY_HOST"] = Application.env["SWARM_MANAGER_ADDRESS"]

        # is None ensure empty string as a valid address
        # if Application.env.get("SYSLOG_ADDRESS") is None:
        #     manager_addr = Application.env["SWARM_MANAGER_ADDRESS"]
        #     Application.env["SYSLOG_ADDRESS"] = f"tcp://{manager_addr}:514"

    if Configuration.FORCE_COMPOSE_ENGINE or not Configuration.swarm_mode:
        DEPLOY_ENGINE = "compose"
    else:
        DEPLOY_ENGINE = "swarm"

    Application.env["DEPLOY_ENGINE"] = DEPLOY_ENGINE

    # Unfortunately this will only work after the creation of the network
    # i.e. will be fallen back to 127.0.0.1 the first time
    try:
        DOCKER_SUBNET = docker.network.inspect(
            f"{Configuration.project}_{DEPLOY_ENGINE}_default"
        ).ipam.config[0]["Subnet"]
    # The first execution will fail and fallen back to localhost
    except DockerException:
        DOCKER_SUBNET = "127.0.0.1"
    Application.env["DOCKER_SUBNET"] = DOCKER_SUBNET

    # Pick the iptables backend for fail2ban: nf_tables when iptables uses it
    FAIL2BAN_IPTABLES = "legacy"
    if str(Application.env["ACTIVATE_FAIL2BAN"]) == "1":
        iptables_version = Packages.get_bin_version("iptables", clean_output=False)
        nf_tables = iptables_version and "nf_tables" in iptables_version
        if nf_tables:
            FAIL2BAN_IPTABLES = "nf_tables"
    Application.env["FAIL2BAN_IPTABLES"] = FAIL2BAN_IPTABLES

    configuration.validate_env(Application.env)
    log.info("Environment configuration is valid")

    # Serialize the environment, sorted by key, in KEY=VALUE format
    with open(COMPOSE_ENVIRONMENT_FILE, "w+") as whandle:
        for key, value in sorted(Application.env.items()):
            if value is None:
                value = ""
            else:
                value = str(value)
            # Quote values containing spaces so the .env parser keeps them whole
            if " " in value:
                value = f"'{value}'"
            whandle.write(f"{key}={value}\n")