def read_composer_yamls(config_files: List[Path]) -> Tuple[List[Path], List[Path]]:
    """Validate compose YAML files and split them into (all, base) lists.

    Every file is parsed to verify that mandatory files exist and that the
    YAML syntax is valid; files located under CONFS_DIR are additionally
    classified as base files.
    """
    all_files: List[Path] = []
    base_files: List[Path] = []

    for path in config_files:
        try:
            # Parsing verifies both existence and YAML validity
            conf = load_yaml_file(file=path, is_optional=False)

            version = conf.get("version")
            if version != COMPOSE_FILE_VERSION:  # pragma: no cover
                log.warning(
                    "Compose file version in {} is {}, expected {}",
                    path,
                    version,
                    COMPOSE_FILE_VERSION,
                )

            if path.exists():
                all_files.append(path)
                # Base files are those loaded from CONFS_DIR
                if CONFS_DIR in path.parents:
                    base_files.append(path)

        except KeyError as e:  # pragma: no cover
            print_and_exit("Error reading {}: {}", path, str(e))

    return all_files, base_files
def mix_configuration(base, custom):
    """Recursively merge ``custom`` into ``base``, mutating ``base``.

    Nested dicts are merged key by key, lists are concatenated, and scalar
    values from ``custom`` replace the existing ones. A ``None`` value in
    ``custom`` never overwrites an existing entry.
    """
    merged = {} if base is None else base

    for key, value in custom.items():
        # Keys missing from the target are copied over unchanged
        if key not in merged:
            merged[key] = value
            continue

        if value is None:
            # Refuse to wipe an existing mapping with an empty value
            if isinstance(merged[key], dict):
                log.warning("Cannot replace {} with empty list", key)
            continue

        if isinstance(value, dict):
            mix_configuration(merged[key], value)
        elif isinstance(value, list):
            merged[key].extend(value)
        else:
            merged[key] = value

    return merged
def interfaces( service: ServiceTypes = typer.Argument( ..., help="Service name", ), detach: bool = typer.Option( False, "--detach", help="Detached mode to run the container in background", show_default=False, ), port: Optional[int] = typer.Option( None, "--port", "-p", help="port to be associated to the current service interface", ), ) -> None: Application.print_command( Application.serialize_parameter("--detach", detach, IF=detach), Application.serialize_parameter("--port", port, IF=port), Application.serialize_parameter("", service), ) # Deprecated since 1.2 if service.value == "sqlalchemy": log.warning("Deprecated interface sqlalchemy, use adminer instead") return None # Deprecated since 2.1 print_and_exit("Interfaces command is replaced by rapydo run {}", service)
def print_obsolete(
    image: str,
    date1: str,
    date2: str,
    service: Optional[str],
    from_img: Optional[str] = None,
) -> None:
    """Warn that a built image is older than its sources.

    When ``from_img`` is given the image depends on a template that changed,
    so a rebuild is suggested; otherwise a pull is suggested. A falsy
    ``service`` makes this a no-op.
    """
    if not service:
        return

    if from_img:
        log.warning(
            """Obsolete image {}: built on {} FROM {} that changed on {}
Update it with: {command}""",
            image,
            date1,
            from_img,
            date2,
            command=RED(f"rapydo build {service}"),
        )
    else:
        log.warning(
            """Obsolete image {}: built on {} but changed on {}
Update it with: {command}""",
            image,
            date1,
            date2,
            command=RED(f"rapydo pull {service}"),
        )
def check_unstaged(path, gitobj):
    """Report (warn + diff) unstaged/untracked files of a repo.

    Returns the dict produced by get_unstaged_files regardless of outcome.
    """
    status = get_unstaged_files(gitobj)

    if status['changed'] or status['untracked']:
        log.warning("You have unstaged files on {}", path)
        print_diff(gitobj, status)

    return status
def create_task(
    project_scaffold: Project,
    name: str,
    services: List[str],
    auth: str,
    force: bool,
    add_tests: bool,
) -> None:
    """Render a new task file into backend/tasks from the task template."""
    task_path = project_scaffold.p_path("backend", "tasks").joinpath(f"{name}.py")

    create_template(
        Templating(),
        "task_template.py",
        task_path,
        name,
        services,
        auth,
        force,
        project_scaffold.project,
    )

    log.info("Task created: {}", task_path)

    if add_tests:
        log.warning("Tests for tasks not implemented yet")
def get_strong_password() -> str:
    """Generate a 16-char password with symbols, retrying until zxcvbn scores 4."""
    candidate = password(length=16, param_not_used="", symbols="%*,-.=^_~")

    # Should never happens since 16 characters with symbols is very unlikely to be weak
    if zxcvbn(candidate)["score"] < 4:  # pragma: no cover
        log.warning("Generated password is not strong enough, sampling again")
        return get_strong_password()

    return candidate
def get_active_branch(gitobj):
    """Return the active branch of ``gitobj`` as a string, or None on failure."""
    if gitobj is not None:
        try:
            return str(gitobj.active_branch)
        except TypeError as e:
            # active_branch may raise TypeError (presumably a detached HEAD
            # — verify against the git library in use)
            log.warning(e)
            return None

    log.error("git object is None, cannot retrieve active branch")
    return None
def get_active_branch(gitobj: Optional[Repo]) -> Optional[str]:
    """Return the name of the repo's active branch, or None when unavailable."""
    if gitobj:
        try:
            return gitobj.active_branch.name
        except AttributeError as e:  # pragma: no cover
            log.warning(e)
            return None

    log.error("git object is None, cannot retrieve active branch")
    return None
def get_local(path):
    """Return the origin remote URL of the repo at ``path``.

    Returns None when the path is not a git repository or has no remotes.
    """
    try:
        repo = get_repo(path)
    except InvalidGitRepositoryError:
        return None

    if not repo.remotes:
        log.warning("Unable to fetch remotes from {}", path)
        return None

    return repo.remotes.origin.url
def info(self):
    """Print a report about the current endpoint.

    Looks the endpoint up in the extended swagger definition first, then in
    the base one; prints URI, swagger path, labels, python file and class,
    and warns when the endpoint belongs to the base framework.
    """
    infos = '\n'
    base_endpoint = False
    endpoint = self.endpoint_name

    # look inside extended swagger definition
    backend = self.backend_dir
    needle = self.find_swagger(endpoint, backend)

    # or look inside base swagger definition of rapydo
    if needle is None:
        backend = self.base_backend_dir
        needle = self.find_swagger(endpoint, backend)
        base_endpoint = True
        python_file_dir = Path(backend, 'resources')
    else:
        python_file_dir = Path(backend, ENDPOINTS_CODE_DIR)

    if needle is None:
        log.exit('No endpoint "{}" found in current swagger definition', endpoint)

    current_dir = Path.cwd()

    uri = Path(needle.get('baseuri', '/api'), endpoint)
    infos += 'Endpoint path:\t{}\n'.format(uri)

    swagger_dir = Path(current_dir, backend, SWAGGER_DIR, needle.get('swagger'))
    infos += 'Swagger path:\t{}/\n'.format(swagger_dir)

    infos += 'Labels:\t\t{}\n'.format(", ".join(needle.get('labels')))

    python_file_path = Path(current_dir, python_file_dir, needle.get('file') + '.py')
    infos += 'Python file:\t{}\n'.format(python_file_path)

    python_class = needle.get('class')
    infos += 'Python class:\t{}\n'.format(python_class)

    log.info("Informations about '{}':\n{}", endpoint, infos)

    if base_endpoint:
        log.warning(
            "This is a BASE endpoint of the RAPyDo framework.\n" +
            "Do not modify it unless your are not a RAPyDo developer.")

    # Sanity check: the declared class must actually exist in the python file
    with open(str(python_file_path)) as fh:
        content = fh.read()
        clstest = 'class {}('.format(python_class)
        if clstest not in content:
            log.critical("Class '{}' definition not found in python file",
                         python_class)
def name_priority(name1, name2):
    """Return whichever of the two names has higher build priority.

    A name missing from name_priorities loses automatically (with a warning);
    otherwise the lower index in name_priorities wins, ties going to name1.
    """
    if name1 not in name_priorities:
        log.warning("Cannot determine build priority name for {}", name1)
        return name2
    if name2 not in name_priorities:
        log.warning("Cannot determine build priority name for {}", name2)
        return name1

    rank1 = name_priorities.index(name1)
    rank2 = name_priorities.index(name2)
    return name1 if rank1 <= rank2 else name2
def check_program(
    program: str,
    min_version: Optional[str] = None,
    max_version: Optional[str] = None,
    min_recommended_version: Optional[str] = None,
) -> str:
    """
    Verify if a binary exists and (optionally) its version.

    Exits when the program is missing or its version is outside the
    supported [min_version, max_version] range; only warns when the version
    is below min_recommended_version. Returns the detected version string.
    """

    found_version = Packages.get_bin_version(program)
    if found_version is None:

        hints = ""

        if program == "docker":  # pragma: no cover
            install_cmd = RED("rapydo install docker")
            hints = "\n\nTo install docker visit: https://get.docker.com"
            # BUGFIX: a newline was missing here, so the hint rendered as
            # "...get.docker.comor execute rapydo install docker"
            hints += f"\nor execute {install_cmd}"

        print_and_exit("A mandatory dependency is missing: {} not found{}", program, hints)

    v = Version(found_version)

    if min_version is not None:
        if Version(min_version) > v:
            print_and_exit(
                "Minimum supported version for {} is {}, found {}",
                program,
                min_version,
                found_version,
            )

    if min_recommended_version is not None:
        if Version(min_recommended_version) > v:
            log.warning(
                "Minimum recommended version for {} is {}, found {}",
                program,
                min_recommended_version,
                found_version,
            )

    if max_version is not None:
        if Version(max_version) < v:
            print_and_exit(
                "Maximum supported version for {} is {}, found {}",
                program,
                max_version,
                found_version,
            )

    log.debug("{} version: {}", program, found_version)
    return found_version
def set_action(action: Optional[str], params: Dict[str, Any]) -> None:
    """Record the CLI action on Configuration and serialize into
    Configuration.parameters every global option that differs from the
    projectrc defaults. Leftover keys in ``params`` trigger a warning."""
    Configuration.action = action
    Configuration.initialize = Configuration.action == "init"
    Configuration.update = Configuration.action == "update"
    Configuration.check = Configuration.action == "check"
    Configuration.install = Configuration.action == "install"
    Configuration.print_version = Configuration.action == "version"
    Configuration.create = Configuration.action == "create"

    params.pop("version")
    # This will start to fail when this parameter will be dropped
    params.pop("services_list")

    Configuration.parameters = []

    project = params.pop("project")
    if project and project != Configuration.projectrc.get("project"):
        Configuration.parameters.append(f"--project {project}")

    hostname = params.pop("hostname")
    if hostname != "localhost" and hostname != Configuration.projectrc.get("hostname"):
        Configuration.parameters.append(f"--hostname {hostname}")

    stack = params.pop("stack")
    if stack and stack != Configuration.projectrc.get("stack"):
        Configuration.parameters.append(f"--stack {stack}")

    if params.pop("production"):
        Configuration.parameters.append("--production")

    if params.pop("testing"):
        Configuration.parameters.append("--testing")

    # Each --env is serialized individually
    for e in params.pop("environment") or []:
        Configuration.parameters.append(f"--env {e}")

    remote_engine = params.pop("remote_engine")
    if remote_engine and stack:
        Configuration.parameters.append(f"--remote {remote_engine}")

    # Boolean exclusion flags map 1:1 to their dashed CLI form
    for flag in ("no_backend", "no_frontend", "no_commons"):
        if params.pop(flag):
            Configuration.parameters.append("--" + flag.replace("_", "-"))

    if params:
        log.warning("Found unknown parameters: {}", params)
def swagger_dir(self):
    """Create the swagger folder for the endpoint, asking before overwriting."""
    self.swagger_path = Path(self.backend_dir, SWAGGER_DIR, self.endpoint_dir)

    if self.swagger_path.exists():
        log.warning('Path {} already exists', self.swagger_path)
        # Without --yes, an explicit confirmation is required to overwrite
        if not self.force_yes:
            self.ask_yes_or_no(
                'Would you like to proceed and overwrite definition?',
                error='Cannot overwrite definition',
            )

    self.create_folder(self.swagger_path)
def command(self, command, options=None, nofailure=False):
    """Invoke a compose command by name on the compose handler.

    Maps the compose library's failure modes onto log.exit calls; a
    SystemExit with code 0 is the normal completion signal. With
    ``nofailure`` set, command failures raise AttributeError instead of
    exiting.

    NOTE(review): the caller-supplied ``options`` dict is mutated
    (a default 'SERVICE' key may be added).
    """
    # NOTE: debug defaults
    # tmp = self.get_defaults(command)
    # print("TEST", tmp, type(tmp))
    # # exit(1)
    compose_handler = self.get_handle()
    method = getattr(compose_handler, command)

    if options is None:
        options = {}

    if options.get('SERVICE', None) is None:
        options['SERVICE'] = []

    log.debug("{}'{}'", compose_log, command)

    out = None
    # sometimes this import stucks... importing here to avoid unnecessary waits
    from docker.errors import APIError
    try:
        out = method(options=options)
    except SystemExit as e:
        # NOTE: we check the status here.
        # System exit is received also when a normal command finished.
        if e.code < 0:
            log.warning("Invalid code returned: {}", e.code)
        elif e.code > 0:
            log.exit("Compose received: system.exit({})", e.code, error_code=e.code)
        else:
            log.verbose("Executed compose {} w/{}", command, options)
    except (clierrors.UserError, cerrors.OperationFailedError, BuildError) as e:
        msg = "Failed command execution:\n{}".format(e)
        if nofailure:
            raise AttributeError(msg)
        else:
            log.exit(msg)
    except APIError as e:
        log.exit("Failed docker container:\n{}", e)
    except (ProjectError, NoSuchService) as e:
        log.exit(str(e))
    else:
        log.verbose("Executed compose {} w/{}", command, options)

    return out
def create_local_path(path: Path, label: str) -> None:
    """Create ``path`` (with parents), exiting with a sudo hint on EPERM."""
    try:
        path.mkdir(parents=True)
        log.warning("A {} was missing and was automatically created: {}", label, path)
    except PermissionError:
        # Build a copy-pasteable command that creates the folder and
        # hands ownership back to the current user
        fix_cmd = (
            f"sudo mkdir -p {path} && "
            f"sudo chown {system.get_current_uid()}:{system.get_current_gid()} {path}"
        )
        print_and_exit(
            "A {} is missing and can't be automatically created: {}{}",
            label,
            path,
            f"\nSuggested command: {RED(fix_cmd)}",
        )
def load_yaml_file(file, path, keep_order=False, is_optional=False):
    """
    Import any data from a YAML file.

    :param file: file name, resolved via get_yaml_path inside ``path``
    :param path: folder expected to contain the file
    :param keep_order: load mappings through OrderedLoader to preserve order
    :param is_optional: a missing file only logs when True, exits otherwise
    :return: the first YAML document found, or {} when missing/unreadable
    """
    filepath = get_yaml_path(file, path=path)

    if filepath is None:
        if is_optional:
            log.info(
                "Failed to read YAML file {}/{}: File does not exist",
                path, file,
            )
        else:
            log.exit(
                "Failed to read YAML file {}/{}: File does not exist",
                path, file,
            )
        return {}

    with open(filepath) as fh:
        try:
            if keep_order:
                # Register the OrderedDict constructor before loading
                OrderedLoader.add_constructor(
                    yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                    construct_mapping)
                loader = yaml.load_all(fh, OrderedLoader)
            else:
                loader = yaml.load_all(fh, yaml.loader.Loader)

            docs = list(loader)

            if len(docs) == 0:
                log.exit("YAML file is empty: {}", filepath)

            # Only the first document of a multi-document file is returned
            return docs[0]

        except Exception as e:
            # # IF dealing with a strange exception string (escaped)
            # import codecs
            # error, _ = codecs.getdecoder("unicode_escape")(str(error))
            # Deliberately broad: any parse failure degrades to an empty dict
            log.warning("Failed to read YAML file [{}]: {}", filepath, e)
            return {}
def name_priority(name1: str, name2: str) -> str:
    """Return the higher-priority of two service names.

    Custom services always lose; names missing from name_priorities make the
    other name win (with a warning); otherwise the lower index wins and ties
    resolve to name1.
    """
    # Prevents warning: Cannot determine build priority with custom services
    if name1 in Application.data.custom_services:
        return name2
    if name2 in Application.data.custom_services:
        return name1

    if name1 not in name_priorities or name2 not in name_priorities:
        log.warning("Cannot determine build priority between {} and {}", name1, name2)
        return name2

    rank1 = name_priorities.index(name1)
    rank2 = name_priorities.index(name2)
    return name1 if rank1 <= rank2 else name2
def find_templates_override(services, templates):
    """Detect which template builds are overridden by project-level builds.

    For every service with a build section, parses its Dockerfile: when the
    FROM image is a rapydo/ template, the pair is recorded. Returns
    (tbuilds, vbuilds): template-image -> template build info, and
    vanilla-image -> template-image respectively.
    """
    # Template and vanilla builds involved in override
    tbuilds = {}
    vbuilds = {}

    for service in services:

        builder = service.get('build')
        if builder is not None:

            dpath = builder.get('context')
            dockerfile = os.path.join(os.curdir, CONTAINERS_YAML_DIRNAME, dpath)
            dfp = DockerfileParser(dockerfile)

            try:
                cont = dfp.content
                if cont is None:
                    log.warning("Dockerfile is empty?")
                else:
                    log.verbose("Parsed dockerfile {}", dpath)
            except FileNotFoundError as e:
                log.exit(e)

            if dfp.baseimage is None:
                dfp.baseimage = 'unknown_build'
            # elif dfp.baseimage.endswith(':template'):
            elif dfp.baseimage.startswith('rapydo/'):
                if dfp.baseimage not in templates:
                    # The referenced template must exist among known builds
                    log.exit("""Unable to find {} in this project
\nPlease inspect the FROM image in {}/Dockerfile
""".format(dfp.baseimage, dockerfile))
                else:
                    vanilla_img = service.get('image')
                    template_img = dfp.baseimage
                    log.verbose("{} overrides {}", vanilla_img, template_img)
                    tbuilds[template_img] = templates.get(template_img)
                    vbuilds[vanilla_img] = template_img

    return tbuilds, vbuilds
def switch_branch(gitobj, branch_name='master', remote=True):
    """Check out ``branch_name`` on the given repository.

    The branch is searched among the remote refs (after a fetch) or the
    local heads depending on ``remote``. Returns True only when the switch
    actually happened.
    """
    if branch_name is None:
        log.error("Unable to switch to a none branch")
        return False

    if gitobj.active_branch.name == branch_name:
        path = os.path.basename(gitobj.working_dir)
        log.info("You are already on branch {} on {}", branch_name, path)
        return False

    # Remote lookup requires a fetch to refresh the refs first
    branches = gitobj.remotes[0].fetch() if remote else gitobj.branches

    branch = None
    branch_found = False
    for branch in branches:
        if remote:
            branch_found = branch.name.endswith('/' + branch_name)
        else:
            branch_found = branch.name == branch_name
        if branch_found:
            break

    if not branch_found or branch is None:
        log.warning("Branch {} not found", branch_name)
        return False

    try:
        gitobj.git.checkout(branch_name)
    except GitCommandError as e:
        log.warning(e)
        return False

    path = os.path.basename(gitobj.working_dir)
    # NOTE: logs the matched ref object (e.g. origin/master), not branch_name
    log.info("Switched branch to {} on {}", branch, path)
    return True
def test_class(self):
    """Render the unittest file for the endpoint, asking before overwriting."""
    filename = 'test_{}.py'.format(self.endpoint_name)
    self.tests_path = Path(self.backend_dir, 'tests')
    target = Path(self.tests_path, filename)

    if target.exists():
        log.warning('File {} already exists', target)
        # Without --yes, an explicit confirmation is required
        if not self.force_yes:
            self.ask_yes_or_no(
                'Would you like to proceed and overwrite that code?',
                error='Cannot overwrite the original file',
            )

    self.render(
        filename,
        template_filename='unittests.py',
        data={
            'endpoint_name': self.endpoint_name,
            'class_name': self.class_name,
        },
        outdir=self.tests_path,
    )
def mix_configuration(
    base: Optional[Configuration], custom: Optional[Configuration]
) -> Configuration:
    """Recursively merge ``custom`` into ``base`` and return the result.

    WARNING: This function has the side effect of changing the input base dict!
    ``None`` inputs are tolerated: an empty base is created, an empty custom
    leaves base untouched.
    """
    if base is None:
        base = {}

    if custom is None:
        return base

    for key, new_value in custom.items():
        if key not in base:
            # TypedDict key must be a string literal;
            base[key] = new_value  # type: ignore
        elif new_value is None:  # pragma: no cover
            # TypedDict key must be a string literal;
            if isinstance(base[key], dict):  # type: ignore
                log.warning("Cannot replace {} with empty list", key)
        elif isinstance(new_value, dict):
            # TypedDict key must be a string literal;
            base[key] = mix_configuration(base[key], new_value)  # type: ignore
        elif isinstance(new_value, list):
            for item in new_value:  # pragma: no cover
                # TypedDict key must be a string literal;
                base[key].append(item)  # type: ignore
        else:
            # TypedDict key must be a string literal;
            base[key] = new_value  # type: ignore

    return base
def switch_branch(gitobj: Optional[Repo], branch_name: str) -> bool:
    """Fetch remote refs and check out ``branch_name``.

    Returns True when the repo already is (or ends up) on the requested
    branch, False on any failure.
    """
    if not gitobj:
        log.error("git object is None, cannot switch the active branch")
        return False

    current_branch = gitobj.active_branch.name

    path: str = "N/A"
    if gitobj.working_dir:
        path = Path(gitobj.working_dir).name

    if current_branch == branch_name:
        log.info("{} already set on branch {}", path, branch_name)
        return True

    # The target has to exist among the freshly fetched remote refs
    branch = None
    for ref in gitobj.remotes[0].fetch():
        if ref.name.endswith(f"/{branch_name}"):
            branch = ref
            break

    if not branch:
        log.warning("Branch {} not found", branch_name)
        return False

    try:
        gitobj.git.checkout(branch_name)
    except GitCommandError as e:  # pragma: no cover
        log.error(e)
        return False

    log.info("Switched {} branch from {} to {}", path, current_branch, branch_name)
    return True
def update(path, gitobj):
    """Pull the active branch from origin, refusing when unstaged files exist."""
    unstaged = get_unstaged_files(gitobj)

    if unstaged['changed'] or unstaged['untracked']:
        log.critical("Unable to update {} repo, you have unstaged files", path)
        print_diff(gitobj, unstaged)
        sys.exit(1)

    for remote in gitobj.remotes:
        if remote.name != 'origin':
            continue
        try:
            branch = gitobj.active_branch
            log.info("Updating {} {} (branch {})", remote, path, branch)
            remote.pull(branch)
        except GitCommandError as e:
            log.error("Unable to update {} repo\n{}", path, e)
        except TypeError as e:
            # During tests a failure here must not abort the whole run
            if TESTING:
                log.warning("Unable to update {} repo, {}", path, e)
            else:
                log.exit("Unable to update {} repo, {}", path, e)
def git_update(ignore_submodule: List[str]) -> None:
    """Update all git submodules not listed in ``ignore_submodule``.

    First pass verifies every repo can be updated (exiting otherwise);
    second pass performs the update. When the controller ('do') itself was
    updated, verifies that it is installed in editable mode from this
    project so that the update takes effect.
    """
    for name, gitobj in Application.gits.items():
        if name in ignore_submodule:
            log.debug("Skipping update on {}", name)
            continue

        if gitobj and not git.can_be_updated(name, gitobj):
            print_and_exit("Can't continue with updates")

    controller_is_updated = False
    for name, gitobj in Application.gits.items():
        if name in ignore_submodule:
            continue

        if name == "do":
            controller_is_updated = True

        if gitobj:
            git.update(name, gitobj)

    if controller_is_updated:
        installation_path = Packages.get_installation_path("rapydo")

        # Can't be tested on GA since rapydo is alway installed from a folder
        if not installation_path:  # pragma: no cover
            log.warning("Controller is not installed in editable mode, "
                        "rapydo is unable to update it")
        elif Application.gits["do"].working_dir:
            do_dir = Path(Application.gits["do"].working_dir)
            if do_dir.is_symlink():
                do_dir = do_dir.resolve()
                # This can be used starting from py39
                # do_dir = do_dir.readlink()

            if do_dir == installation_path:
                log.info("Controller installed from {} and updated", installation_path)
            else:
                # Editable install points elsewhere: the update was a no-op
                log.warning(
                    "Controller not updated because it is installed outside this "
                    "project. Installation path is {}, the current folder is {}",
                    installation_path,
                    do_dir,
                )
        else:  # pragma: no cover
            log.warning("Controller submodule folder can't be found")
def check(
    no_git: bool = typer.Option(
        False,
        "--no-git",
        "-s",
        help="Skip checks on git commits",
        show_default=False,
    ),
    no_builds: bool = typer.Option(
        False,
        "--no-builds",
        help="Skip check on docker builds",
        show_default=False,
    ),
    ignore_submodules: List[str] = typer.Option(
        [],
        "--ignore-submodule",
        "-i",
        help="Ignore submodule",
        show_default=False,
        shell_complete=Application.autocomplete_submodule,
    ),
) -> None:
    """Verify project status: git repos, docker builds, tool versions
    and expired passwords."""
    Application.print_command(
        Application.serialize_parameter("--no-git", no_git, IF=no_git),
        Application.serialize_parameter("--no-builds", no_builds, IF=no_builds),
        Application.serialize_parameter("--ignore-submodule", ignore_submodules),
    )

    Application.get_controller().controller_init()

    docker = Docker()
    if Configuration.swarm_mode:
        log.debug("Swarm is correctly initialized")
        docker.swarm.check_resources()

    if no_git:
        log.info("Skipping git checks")
    else:
        log.info("Checking git (skip with --no-git)")
        Application.git_checks(ignore_submodules)

    if no_builds:
        log.info("Skipping builds checks")
    else:
        log.info("Checking builds (skip with --no-builds)")

        # Flatten all locally available image tags
        dimages: List[str] = []
        for img in docker.client.images():
            if img.repo_tags:
                for i in img.repo_tags:
                    dimages.append(i)

        all_builds = find_templates_build(Application.data.compose_config)
        core_builds = find_templates_build(Application.data.base_services)
        overriding_builds = find_templates_override(
            Application.data.compose_config, core_builds)

        for image_tag, build in all_builds.items():

            # Only images used by at least one active service are relevant
            services = build["services"]
            if not any(x in Application.data.active_services for x in services):
                continue

            if image_tag not in dimages:
                # Core images are pulled, custom images are built
                if image_tag in core_builds:
                    log.warning(
                        "Missing {} image, execute {command}",
                        image_tag,
                        command=RED("rapydo pull"),
                    )
                else:
                    log.warning(
                        "Missing {} image, execute {command}",
                        image_tag,
                        command=RED("rapydo build"),
                    )
                continue

            image_creation = get_image_creation(image_tag)
            # Check if some recent commit modified the Dockerfile
            d1, d2 = build_is_obsolete(image_creation, build.get("path"))
            if d1 and d2:
                tmp_from_image = overriding_builds.get(image_tag)
                # This is the case of a build not overriding a core image,
                # e.g nifi or geoserver. In that case from_image is faked to image_tag
                # just to make print_obsolete to print 'build' instead of 'pull'
                if not tmp_from_image and image_tag not in core_builds:
                    tmp_from_image = image_tag

                print_obsolete(image_tag, d1, d2, build.get("service"), tmp_from_image)

            # if FROM image is newer, this build should be re-built
            elif image_tag in overriding_builds:
                from_img = overriding_builds.get(image_tag, "")
                from_build: Optional[TemplateInfo] = core_builds.get(from_img)

                if not from_build:  # pragma: no cover
                    log.critical("Malformed {} image, from build is missing", image_tag)
                    continue

                # Verify if template build exists
                if from_img not in dimages:  # pragma: no cover
                    # NOTE(review): the format string has three placeholders
                    # but only two arguments are supplied — confirm against
                    # the log wrapper's formatting behavior
                    log.warning(
                        "Missing template build for {} ({})\n{}",
                        from_build.get("services"),
                        from_img,
                    )

                from_timestamp = get_image_creation(from_img)
                # Verify if template build is obsolete or not
                d1, d2 = build_is_obsolete(from_timestamp, from_build.get("path"))
                if d1 and d2:  # pragma: no cover
                    print_obsolete(from_img, d1, d2, from_build.get("service"))

                if from_timestamp > image_creation:
                    b = image_creation.strftime(DATE_FORMAT)
                    c = from_timestamp.strftime(DATE_FORMAT)
                    print_obsolete(image_tag, b, c, build.get("service"), from_img)

    # Detect fixed project files that were modified after templating
    templating = Templating()
    for filename in Application.project_scaffold.fixed_files:
        if templating.file_changed(str(filename)):
            log.warning(
                "{} changed, please execute {command}",
                filename,
                command=RED(f"rapydo upgrade --path (unknown)"),
            )

    # Extract the installed compose / buildx versions from the CLI banners
    compose_version = "Unknown"
    buildx_version = "Unknown"
    m = re.search(
        r"^Docker Compose version (v[0-9]+\.[0-9]+\.[0-9]+)$",
        docker.client.compose.version(),
    )
    if m:
        compose_version = m.group(1)

    m = re.search(
        r"^github.com/docker/buildx (v[0-9]+\.[0-9]+\.[0-9]+) .*$",
        docker.client.buildx.version(),
    )
    if m:
        buildx_version = m.group(1)

    if compose_version == COMPOSE_VERSION:
        log.info("Compose is installed with version {}", COMPOSE_VERSION)
    else:  # pragma: no cover
        cmd = RED("rapydo install compose")
        fix_hint = f"You can update it with {cmd}"
        log.warning(
            "Compose is installed with version {}, expected version is {}.\n{}",
            compose_version,
            COMPOSE_VERSION,
            fix_hint,
        )

    if buildx_version == BUILDX_VERSION:
        log.info("Buildx is installed with version {}", BUILDX_VERSION)
    else:  # pragma: no cover
        cmd = RED("rapydo install buildx")
        fix_hint = f"You can update it with {cmd}"
        log.warning(
            "Buildx is installed with version {}, expected version is {}.\n{}",
            buildx_version,
            BUILDX_VERSION,
            fix_hint,
        )

    for expired_passwords in get_expired_passwords():
        log.warning(
            "{} is expired on {}",
            expired_passwords[0],
            expired_passwords[1].strftime("%Y-%m-%d"),
        )

    log.info("Checks completed")
def run(
    service: str = typer.Argument(
        ...,
        help="Service name",
        shell_complete=Application.autocomplete_allservice,
    ),
    pull: bool = typer.Option(
        False,
        "--pull",
        help="Pull the image before starting the container",
        show_default=False,
    ),
    debug: bool = typer.Option(
        False,
        "--debug",
        help="Start the container in debug mode",
        show_default=False,
    ),
    command: str = typer.Option(
        None,
        "--command",
        help="UNIX command to be executed in the container",
        show_default=False,
    ),
    user: str = typer.Option(
        None,
        "--user",
        "-u",
        help="User existing in selected service",
        show_default=False,
    ),
    first_port: Optional[int] = typer.Option(
        None,
        "--port",
        "-p",
        help="port to be associated to the current service interface",
    ),
    detach: Optional[bool] = typer.Option(
        None,
        "--detach",
        help="Start the container in detach mode (default for non-interfaces)",
        show_default=False,
    ),
) -> None:
    """Start a single service container (volatile, interface or registry),
    always through the compose engine."""
    Application.print_command(
        Application.serialize_parameter("--pull", pull, IF=pull),
        Application.serialize_parameter("--debug", debug, IF=debug),
        Application.serialize_parameter("--command", command, IF=command),
        Application.serialize_parameter("--user", user, IF=user),
        Application.serialize_parameter("--port", first_port, IF=first_port),
        Application.serialize_parameter("", service),
    )

    Configuration.FORCE_COMPOSE_ENGINE = True

    Application.get_controller().controller_init()

    Application.get_controller().check_placeholders_and_passwords(
        Application.data.compose_config, [service])

    if service == REGISTRY and not Configuration.swarm_mode:
        print_and_exit("Can't start the registry in compose mode")

    docker = Docker()
    if Configuration.swarm_mode:
        if service != REGISTRY:
            docker.registry.ping()
        else:
            if docker.registry.ping(do_exit=False):
                registry = docker.registry.get_host()
                print_and_exit("The registry is already running at {}", registry)

            if docker.client.container.exists("registry"):
                log.debug("The registry container is already existing, removing")
                docker.client.container.remove("registry", force=True)

    # --user and --command are only meaningful in debug mode
    if not debug:
        if user:
            print_and_exit("Can't specify a user if debug mode is OFF")
        if command:
            print_and_exit("Can't specify a command if debug mode is OFF")

    if user:
        log.warning(
            "Please remember that users in volatile containers are not mapped on"
            " current uid and gid. You should not write or modify files on volumes"
            " to prevent permissions errors")

    if pull:
        log.info("Pulling image for {}...", service)
        docker.client.compose.pull([service])
    else:
        verify_available_images(
            [service],
            Application.data.compose_config,
            Application.data.base_services,
            is_run_command=True,
        )

    # This is equivalent to the old volatile command
    if debug:
        if not command:
            command = "bash"

        log.info("Starting {}...", service)
        docker.compose.create_volatile_container(
            service,
            command=command,
            user=user,
            # if None the wrapper will automatically switch the default ones
            # How to prevent ports on volatile containers?
            # publish=None,
        )
        log.info("Service {} removed", service)

        return None

    # This is equivalent to the old registry command
    if service == REGISTRY:
        # @ symbol in secrets is not working
        # https://github.com/bitnami/charts/issues/1954
        # Other symbols like # and " also lead to configuration errors
        os.environ["REGISTRY_HTTP_SECRET"] = password(
            param_not_used="", length=96
            # , symbols="%*,-.=?[]^_~"
        )

    publish_ports = get_publish_ports(service, first_port)

    # Interfaces default to foreground, everything else to detached
    if detach is None:
        if service == "swaggerui" or service == "adminer":
            detach = False
        else:
            detach = True

    log.info("Running {}...", service)

    if service == "swaggerui":
        if Configuration.production:
            prot = "https"
        else:
            prot = "http"

        port = publish_ports[0][0] if publish_ports else first_port
        log.info(
            "You can access SwaggerUI web page here: {}\n",
            f"{prot}://{Configuration.hostname}:{port}",
        )

    if service == "adminer":
        if Configuration.production:
            prot = "https"
        else:
            prot = "http"

        port = publish_ports[0][0] if publish_ports else first_port
        log.info(
            "You can access Adminer interface on: {}\n",
            f"{prot}://{Configuration.hostname}:{port}",
        )

    docker.compose.create_volatile_container(
        service, detach=detach, publish=publish_ports)
def backup(
    service: SupportedServices = typer.Argument(..., help="Service name"),
    force: bool = typer.Option(
        False,
        "--force",
        help="Force the backup procedure",
        show_default=False,
    ),
    max_backups: int = typer.Option(
        0,
        "--max",
        help="Maximum number of backups, older exceeding this number will be removed",
        show_default=False,
    ),
    dry_run: bool = typer.Option(
        False,
        "--dry-run",
        help="Do not perform any backup or delete backup files",
        show_default=False,
    ),
    restart: List[str] = typer.Option(
        [],
        "--restart",
        help="Service to be restarted once completed the backup (multiple allowed)",
        shell_complete=Application.autocomplete_service,
    ),
) -> None:
    """Execute the backup procedure for the given service, optionally
    rotating old backups and restarting dependent services afterwards."""
    Application.print_command(
        Application.serialize_parameter("--force", force, IF=force),
        Application.serialize_parameter("--max", max_backups, IF=max_backups),
        Application.serialize_parameter("--dry-run", dry_run, IF=dry_run),
        Application.serialize_parameter("--restart", restart, IF=restart),
        Application.serialize_parameter("", service.value),
    )

    if dry_run:
        log.warning("Dry run mode is enabled")

    Application.get_controller().controller_init()

    service_name = service.value

    verify_available_images(
        [service_name],
        Application.data.compose_config,
        Application.data.base_services,
    )

    docker = Docker()

    container = docker.get_container(service_name)

    backup_dir = BACKUP_DIR.joinpath(service_name)
    backup_dir.mkdir(parents=True, exist_ok=True)

    # Rotation: keep at most max_backups files, deleting the oldest
    if max_backups > 0:
        backups = list(backup_dir.glob(get_date_pattern()))
        if max_backups >= len(backups):
            log.debug("Found {} backup files, maximum not reached", len(backups))
        else:
            for f in sorted(backups)[:-max_backups]:
                if not dry_run:
                    f.unlink()
                log.warning(
                    "{} deleted because exceeding the max number of backup files ({})",
                    f.name,
                    max_backups,
                )

    module = BACKUP_MODULES.get(service.value)

    if not module:  # pragma: no cover
        print_and_exit(f"{service.value} misconfiguration, module not found")

    now = datetime.now().strftime("%Y_%m_%d-%H_%M_%S")

    module.backup(container=container, now=now, force=force, dry_run=dry_run)

    if restart and not dry_run:
        # Two 10s sleeps give a 20s grace period before the restart
        log.info("Restarting services in 20 seconds...")
        time.sleep(10)
        log.info("Restarting services in 10 seconds...")
        time.sleep(10)
        reload(docker, restart)
def build( services: List[str] = typer.Argument( None, help="Services to be built", shell_complete=Application.autocomplete_service, ), core: bool = typer.Option( False, "--core", help="Include core images to the build list", show_default=False, ), force: bool = typer.Option( False, "--force", "-f", help="remove the cache to force the build", show_default=False, ), ) -> bool: Application.print_command( Application.serialize_parameter("--core", core, IF=core), Application.serialize_parameter("--force", force, IF=force), Application.serialize_parameter("", services), ) Application.get_controller().controller_init(services) docker = Docker() if docker.client.buildx.is_installed(): v = docker.client.buildx.version() log.debug("docker buildx is installed: {}", v) else: # pragma: no cover print_and_exit( "A mandatory dependency is missing: docker buildx not found" "\nInstallation guide: https://github.com/docker/buildx#binary-release" "\nor try the automated installation with {command}", command=RED("rapydo install buildx"), ) if Configuration.swarm_mode: docker.registry.ping() docker.registry.login() images: Set[str] = set() if core: log.debug("Forcing rebuild of core builds") # Create merged compose file with core files only docker = Docker(compose_files=Application.data.base_files) docker.compose.dump_config(Application.data.services, set_registry=False) log.debug("Compose configuration dumped on {}", COMPOSE_FILE) docker.client.buildx.bake( targets=Application.data.services, files=[COMPOSE_FILE], pull=True, load=True, cache=not force, ) log.info("Core images built") if Configuration.swarm_mode: log.warning( "Local registry push is not implemented yet for core images") docker = Docker() docker.compose.dump_config(Application.data.services, set_registry=False) log.debug("Compose configuration dumped on {}", COMPOSE_FILE) core_builds = find_templates_build(Application.data.base_services) all_builds = find_templates_build(Application.data.compose_config) 
services_with_custom_builds: List[str] = [] for image, build in all_builds.items(): if image not in core_builds: # this is used to validate the target Dockerfile: if p := build.get("path"): get_dockerfile_base_image(p, core_builds) services_with_custom_builds.extend(build["services"]) images.add(image)