def restore(
    container: Optional[Tuple[str, str]], backup_file: str, force: bool
) -> None:
    if not container:
        print_and_exit(
            "The restore procedure requires {} running, please start your stack",
            SERVICE_NAME,
        )

    docker = Docker()

    log.info("Starting restore on {}...", SERVICE_NAME)

    backup_path = f"/backup/{SERVICE_NAME}/{backup_file}"

    dump_file = backup_file.replace(".gz", "")
    dump_path = f"/tmp/{dump_file}"

    docker.exec_command(container, user="******", command=f"cp {backup_path} /tmp/")

    docker.exec_command(
        container, user="******", command=f"gunzip -kf /tmp/{backup_file}"
    )

    # Executed as root
    docker.exec_command(container, user="******", command=f"chown postgres {dump_path}")

    # By using pg_dumpall the resulting dump can be restored with psql:
    docker.exec_command(
        container,
        user="******",
        command=f"psql -U sqluser -f {dump_path} postgres",
    )

    log.info("Restore from data{} completed", backup_path)

def logs(
    services: List[str] = typer.Argument(
        None,
        help="Services to be inspected",
        shell_complete=Application.autocomplete_service,
    ),
    follow: bool = typer.Option(
        False,
        "--follow",
        "-f",
        help="Follow logs",
        show_default=False,
    ),
    tail: int = typer.Option(
        500,
        "--tail",
        "-t",
        help="Number of lines to show",
    ),
) -> None:
    Application.print_command(
        Application.serialize_parameter("--follow", follow, IF=follow),
        Application.serialize_parameter("--tail", tail, IF=tail),
        Application.serialize_parameter("", services),
    )
    Application.get_controller().controller_init(services)

    services = Application.data.services

    docker = Docker()
    try:
        docker.compose.logs(services, follow=follow, tail=tail)
    except KeyboardInterrupt:  # pragma: no cover
        log.info("Stopped by keyboard")

def create_task(
    project_scaffold: Project,
    name: str,
    services: List[str],
    auth: str,
    force: bool,
    add_tests: bool,
) -> None:
    path = project_scaffold.p_path("backend", "tasks")
    path = path.joinpath(f"{name}.py")

    templating = Templating()
    create_template(
        templating,
        "task_template.py",
        path,
        name,
        services,
        auth,
        force,
        project_scaffold.project,
    )

    log.info("Task created: {}", path)

    if add_tests:
        log.warning("Tests for tasks not implemented yet")

def clone(online_url, path, branch='master', do=False, check=True, expand_path=True):

    if expand_path:
        local_path = os.path.join(os.curdir, SUBMODULES_DIR, path)
    else:
        local_path = path
    local_path_exists = os.path.exists(local_path)

    if local_path_exists:
        log.debug("Path {} already exists", local_path)
        gitobj = Repo(local_path)
    elif do:
        gitobj = Repo.clone_from(url=online_url, to_path=local_path)
        log.info("Cloned repo {}@{} as {}", online_url, branch, path)
    else:
        log.exit(
            "Repo {} missing as {}. You should init your project".format(
                online_url, local_path
            )
        )

    if do:
        switch_branch(gitobj, branch)

    if check:
        compare_repository(gitobj, branch, online_url=online_url, path=path)

    return gitobj

def exec_command(capfd, command, get_out=True, get_err=False):

    command = command.split(" ")

    arguments = ArgParser(args=command)

    try:
        Application(arguments)
    # NOTE: docker-compose calls SystemExit at the end of the command...
    except SystemExit:
        log.info('completed')

    out, err = capfd.readouterr()
    out = out.replace('\r', '').split("\n")
    err = err.replace('\r', '').split("\n")

    if get_out:
        pp(out)

    if get_err:
        pp(err)

    if get_out and get_err:
        return out, err

    if get_out:
        return out

    if get_err:
        return err

    return None

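# A minimal usage sketch for the helper above, assuming a pytest module where
# capfd is the standard pytest capture fixture; the command string and the
# assertion are illustrative only.
def test_check_command(capfd):
    # Runs the controller as if "check -i main" was typed on the command line
    out = exec_command(capfd, "check -i main")
    # out is the captured stdout split into lines; inspect it as needed
    assert out is not None
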
def download(url: str, expected_checksum: str) -> Path:

    try:
        r = requests.get(url, timeout=10)
        if r.status_code != 200:
            print_and_exit(
                "Can't download {}, invalid status code {}", url, str(r.status_code)
            )

        file: Path = Path(tempfile.NamedTemporaryFile().name)
        with open(file, "wb") as f:
            for chunk in r.iter_content(chunk_size=1024):
                if chunk:  # filter out keep-alive new chunks
                    f.write(chunk)

        md5 = "N/A"
        with open(file, "rb") as f:
            md5 = hashlib.md5(f.read()).hexdigest()

        if md5 == expected_checksum:
            log.info("Checksum verified: {}", md5)
        else:
            print_and_exit(
                "File checksum ({}) does not match the expected value ({})",
                md5,
                expected_checksum,
            )

        return file
    except requests.exceptions.ReadTimeout as e:  # pragma: no cover
        print_and_exit("The request timed out, please retry in a while ({})", str(e))

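# A minimal sketch of how the expected checksum could be produced beforehand
# from a trusted local copy of the artifact; compute_md5 is a hypothetical
# helper, not part of the original module.
def compute_md5(path: Path) -> str:
    # Same digest that download() computes to validate the downloaded file
    return hashlib.md5(path.read_bytes()).hexdigest()
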
def restore(container: Optional[Tuple[str, str]], backup_file: str, force: bool) -> None:

    if container and not force:
        print_and_exit(
            "RabbitMQ is running and the restore will temporarily stop it. "
            "If you want to continue add --force flag"
        )

    docker = Docker()

    if container:
        docker.remove(SERVICE_NAME)

    backup_path = f"/backup/{SERVICE_NAME}/{backup_file}"

    command = f"tar -xf {backup_path} -C /var/lib/rabbitmq/"

    log.info("Starting restore on {}...", SERVICE_NAME)

    docker.compose.create_volatile_container(SERVICE_NAME, command=command)

    log.info("Restore from data{} completed", backup_path)

    if container:
        docker.start(SERVICE_NAME)

def update(
    ignore_submodules: List[str] = typer.Option(
        [],
        "--ignore-submodule",
        "-i",
        help="Ignore a submodule",
        show_default=False,
        shell_complete=Application.autocomplete_submodule,
    ),
) -> None:
    Application.print_command(
        Application.serialize_parameter("--ignore-submodule", ignore_submodules),
    )
    Application.get_controller().controller_init()

    Application.git_update(ignore_submodules)
    # Read the configuration again, as it may have changed with the git updates
    Application.get_controller().read_specs(read_extended=True)

    Application.get_controller().make_env()

    # Compose services and variables
    base_services, config = Application.get_controller().get_compose_configuration()

    active_services = services.find_active(config)

    Application.get_controller().check_placeholders_and_passwords(
        config, active_services
    )

    log.info("All updated")

def render(self, filename, data, outdir='custom', template_filename=None):

    mypath = Path(outdir, filename)
    if self.file_exists_and_nonzero(mypath):
        # # if you do not want to overwrite
        # log.warning("{} already exists", filename)
        # return False
        log.info("{} already exists. Overwriting.", filename)

    filepath = str(mypath)
    abs_path = os.path.dirname(os.path.realpath(__file__))
    template_dir = os.path.join(abs_path, TEMPLATE_DIR)
    if template_filename is None:
        template_filename = filename

    # Simplify the usage of jinja2 templating.
    # https://www.pydanny.com/jinja2-quick-load-function.html
    loader = FileSystemLoader(template_dir)
    env = Environment(loader=loader)
    template = env.get_template(template_filename)
    templated_content = template.render(**data)

    self.save_template(filepath, templated_content)

    # NOTE: this below has to be INFO,
    # otherwise the user doesn't get info on what paths were created
    log.info("rendered {}", filepath)

    return True

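# A minimal usage sketch, assuming an instance of the class exposing render()
# above and a "Dockerfile" jinja2 template available in TEMPLATE_DIR; all
# names and values are illustrative only.
# templating.render(
#     "Dockerfile",
#     data={"project": "myproject", "version": "1.0"},
#     outdir="custom",
# )
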
def clone(
    url: str, path: Path, branch: str, do: bool = False, check: bool = True
) -> Repo:

    local_path = SUBMODULES_DIR.joinpath(path)

    if local_path.exists():
        log.debug("Path {} already exists", local_path)
        gitobj = Repo(local_path)
    elif do:
        gitobj = Repo.clone_from(url=url, to_path=local_path)
        log.info("Cloned {}@{} as {}", url, branch, path)
    else:
        print_and_exit(
            "Repo {} missing as {}. You should init your project",
            url,
            local_path,
        )

    if do:
        ret = switch_branch(gitobj, branch)
        if not ret:  # pragma: no cover
            print_and_exit("Cannot switch repo {} to version {}", local_path, branch)

    if check:
        compare_repository(gitobj, branch, online_url=url)

    return gitobj

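# A minimal usage sketch, assuming SUBMODULES_DIR points at the project's
# submodules folder; clone_http_api is a hypothetical helper and the
# repository URL and branch are illustrative only.
def clone_http_api() -> Repo:
    return clone(
        url="https://github.com/rapydo/http-api.git",
        path=Path("http-api"),
        branch="2.4",
        do=True,  # clone (and switch branch) if the local copy is missing
        check=True,  # verify the local repo matches the requested branch
    )
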
def init(self) -> None:
    manager_address = str(
        Application.env.get("SWARM_MANAGER_ADDRESS")
        or system.get_local_ip(Configuration.production)
    )

    log.info("Initializing Swarm with manager IP {}", manager_address)
    self.docker.swarm.init(advertise_address=manager_address)

def install_controller_from_git(version: str) -> None:
    controller = f"git+https://github.com/rapydo/do.git@{version}"
    log.info("You asked to install rapydo {} from git", version)

    Packages.install(controller, editable=False)
    log.info("Controller version {} installed from git", version)

def status(self, services: List[str]) -> None:
    print("")

    prefix = f"{Configuration.project}{COMPOSE_SEP}"
    table: List[List[str]] = []
    for container in self.docker.compose.ps():
        name = container.name
        if not name.startswith(prefix):
            continue

        # to be replaced with removeprefix
        name = name[len(prefix):]
        if COMPOSE_SEP in name:
            name = name[0:name.index(COMPOSE_SEP)]

        if name not in services:
            continue

        status = container.state.status
        if status == "shutdown" or status == "complete":
            COLOR = colors.BLUE
        elif status == "running":
            COLOR = colors.GREEN
        elif status == "starting" or status == "ready":
            COLOR = colors.YELLOW
        elif status == "failed":
            COLOR = colors.RED
        else:
            COLOR = colors.RESET

        ports_list = []
        for container_port, host_port in container.network_settings.ports.items():
            if host_port:
                container_port = container_port.split("/")[0]
                ports_list.append(f"{container_port}->{host_port[0]['HostPort']}")

        table.append(
            [
                container.id[0:12],
                f"{COLOR}{container.name}{colors.RESET}",
                status,
                container.created.strftime("%d-%m-%Y %H:%M:%S"),
                container.config.image,
                ",".join(ports_list),
            ],
        )

    if not table:
        log.info("No container is running")
    else:
        print(
            tabulate(
                table,
                tablefmt=TABLE_FORMAT,
                headers=["ID", "NAME", "STATUS", "CREATED", "IMAGE", "PORTS"],
            )
        )

def check_internet_connection() -> None:
    """Check if connected to internet"""

    try:
        requests.get("https://www.google.com", timeout=2)
        if Configuration.check:
            log.info("Internet connection is available")
    except requests.ConnectionError:  # pragma: no cover
        print_and_exit("Internet connection is unavailable")

def backup(
    container: Optional[Tuple[str, str]], now: datetime, force: bool, dry_run: bool
) -> None:
    if not container:
        print_and_exit(
            "The backup procedure requires {} running, please start your stack",
            SERVICE_NAME,
        )

    docker = Docker()

    log.info("Starting backup on {}...", SERVICE_NAME)

    tmp_backup_path = f"/tmp/{now}"
    command = f"sh -c 'mariabackup --backup --target-dir={tmp_backup_path} "
    command += '-uroot -p"$MYSQL_ROOT_PASSWORD"\''

    # Creating backup on a tmp folder as mysql user
    if not dry_run:
        docker.exec_command(container, user="******", command=command)

    # Prepare the backup to make it consistent and ready to be restored
    if not dry_run:
        log.info("Executing mariabackup...")
        docker.exec_command(
            container,
            user="******",
            command=f"sh -c 'mariabackup --prepare --target-dir={tmp_backup_path}'",
        )

    # Compress the prepared data folder. -C strips the /tmp prefix from the archived paths
    if not dry_run:
        log.info("Compressing the backup file...")
        docker.exec_command(
            container,
            user="******",
            command=f"tar -zcf {tmp_backup_path}.tar.gz -C /tmp {now}",
        )

    # Verify the gz integrity
    if not dry_run:
        log.info("Verifying the integrity of the backup file...")
        docker.exec_command(
            container, user="******", command=f"gzip -t {tmp_backup_path}.tar.gz"
        )

    # Move the backup from /tmp to /backup (as root user)
    backup_path = f"/backup/{SERVICE_NAME}/{now}.tar.gz"
    if not dry_run:
        docker.exec_command(
            container,
            user="******",
            command=f"mv {tmp_backup_path}.tar.gz {backup_path}",
        )

    log.info("Backup completed: data{}", backup_path)

def dump() -> None:
    Application.print_command()
    Application.get_controller().controller_init()

    docker = Docker()
    docker.compose.dump_config(
        Application.data.services, v1_compatibility=not Configuration.swarm_mode
    )

    log.info("Config dump: {}", COMPOSE_FILE)

def info(self):

    infos = '\n'
    base_endpoint = False
    endpoint = self.endpoint_name

    # look inside extended swagger definition
    backend = self.backend_dir
    needle = self.find_swagger(endpoint, backend)

    # or look inside base swagger definition of rapydo
    if needle is None:
        backend = self.base_backend_dir
        needle = self.find_swagger(endpoint, backend)
        base_endpoint = True
        python_file_dir = Path(backend, 'resources')
    else:
        python_file_dir = Path(backend, ENDPOINTS_CODE_DIR)

    if needle is None:
        log.exit('No endpoint "{}" found in current swagger definition', endpoint)

    current_dir = Path.cwd()
    uri = Path(needle.get('baseuri', '/api'), endpoint)
    infos += 'Endpoint path:\t{}\n'.format(uri)

    swagger_dir = Path(current_dir, backend, SWAGGER_DIR, needle.get('swagger'))
    infos += 'Swagger path:\t{}/\n'.format(swagger_dir)

    infos += 'Labels:\t\t{}\n'.format(", ".join(needle.get('labels')))

    python_file_path = Path(current_dir, python_file_dir, needle.get('file') + '.py')
    infos += 'Python file:\t{}\n'.format(python_file_path)

    python_class = needle.get('class')
    infos += 'Python class:\t{}\n'.format(python_class)

    log.info("Information about '{}':\n{}", endpoint, infos)

    if base_endpoint:
        log.warning(
            "This is a BASE endpoint of the RAPyDo framework.\n"
            + "Do not modify it unless you are a RAPyDo developer."
        )

    with open(str(python_file_path)) as fh:
        content = fh.read()
        clstest = 'class {}('.format(python_class)
        if clstest not in content:
            log.critical("Class '{}' definition not found in python file", python_class)

def stop(
    services: List[str] = typer.Argument(
        None,
        help="Services to be stopped",
        shell_complete=Application.autocomplete_service,
    )
) -> None:
    Application.print_command(Application.serialize_parameter("", services))
    Application.get_controller().controller_init(services)

    docker = Docker()
    docker.client.compose.stop(Application.data.services)

    log.info("Stack stopped")

def wait_stack_deploy(docker: Docker) -> None:
    MAX = 60
    for i in range(0, MAX):
        try:
            if docker.get_running_services():
                break

            log.info("Stack is still starting, waiting... [{}/{}]", i + 1, MAX)
            time.sleep(1)
        # Can happen when the stack is about to be deployed
        except DockerException:  # pragma: no cover
            pass

def read_specs(self, read_extended: bool = True) -> None:
    """Read project configuration"""

    try:
        confs = configuration.read_configuration(
            default_file_path=CONFS_DIR,
            base_project_path=Configuration.ABS_PROJECT_PATH,
            projects_path=PROJECT_DIR,
            submodules_path=SUBMODULES_DIR,
            read_extended=read_extended,
            production=Configuration.production,
        )

        # confs[3] is the core config, extra fields are allowed
        configuration.validate_configuration(confs[3], core=True)
        # confs[0] is the merged conf (core + custom), extra fields are allowed
        configuration.validate_configuration(confs[0], core=False)
        log.info("Project configuration is valid")

        Configuration.specs = configuration.mix_configuration(
            confs[0], Configuration.host_configuration
        )
        configuration.validate_configuration(Configuration.specs, core=False)
        log.info("Host configuration is valid")

        self.extended_project = confs[1]
        self.extended_project_path = confs[2]

    except AttributeError as e:  # pragma: no cover
        print_and_exit(str(e))

    Configuration.frontend = cast(
        str,
        (
            Configuration.specs.get("variables", {})
            .get("env", {})
            .get("FRONTEND_FRAMEWORK", NO_FRONTEND)
        ),
    )

    if Configuration.frontend == NO_FRONTEND:
        Configuration.frontend = None

    project = Configuration.specs.get("project", {})

    Configuration.project_title = project.get("title", "Unknown title")
    Configuration.version = project.get("version", "")
    Configuration.rapydo_version = project.get("rapydo", "")
    Configuration.project_description = project.get("description", "Unknown description")
    Configuration.project_keywords = project.get("keywords", "")

    if not Configuration.rapydo_version:  # pragma: no cover
        print_and_exit("RAPyDo version not found in your project_configuration file")

    Configuration.rapydo_version = str(Configuration.rapydo_version)

def start_containers(
    self,
    services: List[str],
    force: bool = False,
    scales: Optional[Dict[str, int]] = None,
) -> None:
    if scales:
        # Based on the rapydo scale implementation, services is always a 1-length list
        service = services[0]
        nreplicas = scales.get(service, 0)
        services_list = f"{service}={nreplicas}"
        log.info("Scaling services: {}...", services_list)
    else:
        services_list = ", ".join(services)
        scales = {}
        log.info("Starting services: {}...", services_list)

    self.docker.compose.up(
        services=services,
        build=False,
        detach=True,
        abort_on_container_exit=False,
        force_recreate=force,
        scales=scales,
    )

    if scales:
        log.info("Services scaled: {}", services_list)
    else:
        log.info("Services started: {}", services_list)

def backup(
    container: Optional[Tuple[str, str]], now: datetime, force: bool, dry_run: bool
) -> None:
    if container and not force:
        print_and_exit(
            "RabbitMQ is running and the backup will temporarily stop it. "
            "If you want to continue add --force flag"
        )

    docker = Docker()

    if container and not dry_run:
        docker.remove(SERVICE_NAME)

    backup_path = f"/backup/{SERVICE_NAME}/{now}.tar.gz"

    log.info("Starting backup on {}...", SERVICE_NAME)

    if not dry_run:
        log.info("Executing rabbitmq mnesia...")
        docker.compose.create_volatile_container(
            SERVICE_NAME, command=f"tar -zcf {backup_path} -C /var/lib/rabbitmq mnesia"
        )

    # Verify the gz integrity
    if not dry_run:
        log.info("Verifying the integrity of the backup file...")
        docker.compose.create_volatile_container(
            SERVICE_NAME, command=f"gzip -t {backup_path}"
        )

    log.info("Backup completed: data{}", backup_path)

    if container and not dry_run:
        docker.start(SERVICE_NAME)

def git_update(ignore_submodule: List[str]) -> None:

    for name, gitobj in Application.gits.items():
        if name in ignore_submodule:
            log.debug("Skipping update on {}", name)
            continue

        if gitobj and not git.can_be_updated(name, gitobj):
            print_and_exit("Can't continue with updates")

    controller_is_updated = False
    for name, gitobj in Application.gits.items():
        if name in ignore_submodule:
            continue

        if name == "do":
            controller_is_updated = True

        if gitobj:
            git.update(name, gitobj)

    if controller_is_updated:
        installation_path = Packages.get_installation_path("rapydo")

        # Can't be tested on GA since rapydo is always installed from a folder
        if not installation_path:  # pragma: no cover
            log.warning(
                "Controller is not installed in editable mode, "
                "rapydo is unable to update it"
            )
        elif Application.gits["do"].working_dir:
            do_dir = Path(Application.gits["do"].working_dir)
            if do_dir.is_symlink():
                do_dir = do_dir.resolve()
                # This can be used starting from py39
                # do_dir = do_dir.readlink()

            if do_dir == installation_path:
                log.info("Controller installed from {} and updated", installation_path)
            else:
                log.warning(
                    "Controller not updated because it is installed outside this "
                    "project. Installation path is {}, the current folder is {}",
                    installation_path,
                    do_dir,
                )
        else:  # pragma: no cover
            log.warning("Controller submodule folder can't be found")

def working_clone(
    name: str, repo: configuration.Submodule, from_path: Optional[Path] = None
) -> Optional[GitRepo]:

    # substitute values starting with '$$'
    myvars = {
        ANGULAR: Configuration.frontend == ANGULAR,
    }

    condition = repo.get("_if", "")
    if condition.startswith("$$"):
        # Is this repo enabled?
        if not myvars.get(condition.lstrip("$"), None):
            return None

    default_version = (
        Configuration.rapydo_version if Configuration.rapydo_version else __version__
    )

    if from_path is not None:

        local_path = from_path.joinpath(name)
        if not local_path.exists():
            print_and_exit("Submodule {} not found in {}", name, local_path)

        submodule_path = Path(SUBMODULES_DIR, name)

        if submodule_path.exists():
            log.info("Path {} already exists, removing", submodule_path)
            if submodule_path.is_dir() and not submodule_path.is_symlink():
                shutil.rmtree(submodule_path)
            else:
                submodule_path.unlink()

        os.symlink(local_path, submodule_path)

    url = repo.get("online_url")
    if not url:  # pragma: no cover
        print_and_exit("Submodule misconfiguration, online url not found: {}", name)

    return git.clone(
        url=url,
        path=Path(name),
        branch=repo.get("branch") or default_version,
        do=Configuration.initialize,
        check=not Configuration.install,
    )

def wait_network_removal(docker: Docker, network: str) -> None:
    MAX = 30
    for i in range(0, MAX):
        try:
            for n in docker.client.network.list():
                if n.driver == "overlay" and n.name == network:
                    break
            else:
                break

            log.info("Stack is still removing, waiting... [{}/{}]", i + 1, MAX)
            time.sleep(2)
        # Can happen when the network is about to be removed and is still
        # returned by list, but is no longer available for inspect.
        # It is assumed to be removed
        except DockerException:  # pragma: no cover
            break

def load_yaml_file(file, path, keep_order=False, is_optional=False):
    """
    Import any data from a YAML file.
    """

    filepath = get_yaml_path(file, path=path)

    if filepath is None:
        if is_optional:
            log.info(
                "Failed to read YAML file {}/{}: File does not exist",
                path,
                file,
            )
        else:
            log.exit(
                "Failed to read YAML file {}/{}: File does not exist",
                path,
                file,
            )
        return {}

    with open(filepath) as fh:
        try:
            if keep_order:
                OrderedLoader.add_constructor(
                    yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                    construct_mapping,
                )
                loader = yaml.load_all(fh, OrderedLoader)
            else:
                loader = yaml.load_all(fh, yaml.loader.Loader)

            docs = list(loader)

            if len(docs) == 0:
                log.exit("YAML file is empty: {}", filepath)

            return docs[0]

        except Exception as e:
            # # IF dealing with a strange exception string (escaped)
            # import codecs
            # error, _ = codecs.getdecoder("unicode_escape")(str(error))
            log.warning("Failed to read YAML file [{}]: {}", filepath, e)
            return {}

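# A minimal usage sketch, assuming a "projects/myproject" folder containing a
# project_configuration.yaml; load_project_configuration is a hypothetical
# helper and the file and path values are illustrative only.
def load_project_configuration():
    # is_optional=False means a missing file exits with an error
    return load_yaml_file(
        file="project_configuration.yaml",
        path="projects/myproject",
        keep_order=True,
        is_optional=False,
    )
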
def backup(
    container: Optional[Tuple[str, str]], now: datetime, force: bool, dry_run: bool
) -> None:
    if not container:
        print_and_exit(
            "The backup procedure requires {} running, please start your stack",
            SERVICE_NAME,
        )

    docker = Docker()

    log.info("Starting backup on {}...", SERVICE_NAME)

    # This double step is required because the postgres user is uid 70 and
    # is not mapped to the host uid as happens for the other services
    tmp_backup_path = f"/tmp/{now}.sql"

    # Creating backup on a tmp folder as postgres user
    if not dry_run:
        log.info("Executing pg_dumpall...")
        docker.exec_command(
            container,
            user="******",
            command=f"pg_dumpall --clean -U sqluser -f {tmp_backup_path}",
        )

    # Compress the sql dump with the best compression ratio
    if not dry_run:
        log.info("Compressing the backup file...")
        docker.exec_command(
            container, user="******", command=f"gzip -9 {tmp_backup_path}"
        )

    # Verify the gz integrity
    if not dry_run:
        log.info("Verifying the integrity of the backup file...")
        docker.exec_command(
            container, user="******", command=f"gzip -t {tmp_backup_path}.gz"
        )

    # Move the backup from /tmp to /backup (as root user)
    backup_path = f"/backup/{SERVICE_NAME}/{now}.sql.gz"
    if not dry_run:
        docker.exec_command(
            container, user="******", command=f"mv {tmp_backup_path}.gz {backup_path}"
        )

    log.info("Backup completed: data{}", backup_path)

def main():

    pretty_errors  # pylint:disable=pointless-statement

    try:
        # imported here to avoid uncaught KeyboardInterrupt on startup
        from controller.arguments import ArgParser

        arguments = ArgParser()

        from controller.app import Application

        Application(arguments)
    except KeyboardInterrupt:
        log.info("Interrupted by the user")
    except NotImplementedError as e:
        print('NOT IMPLEMENTED (yet): {}'.format(e))
    else:
        log.verbose("Application completed")

def execute_command(command: str, parameters: List[str]) -> str:
    try:
        # Pattern in plumbum library for executing a shell command
        local_command = local[command]
        log.info("Executing command {} {}", command, " ".join(parameters))
        return str(local_command(parameters))
    except CommandNotFound:
        raise ExecutionException(f"Command not found: {command}")
    except ProcessExecutionError:
        raise ExecutionException(
            f"Cannot execute command: {command} {' '.join(parameters)}"
        )
    # raised on Windows
    except OSError:  # pragma: no cover
        raise ExecutionException(f"Cannot execute: {command}")

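# A minimal usage sketch for execute_command; print_git_status is a
# hypothetical helper, and the command and arguments are illustrative only
# (assuming git is available on the PATH).
def print_git_status() -> None:
    try:
        # Returns the captured stdout of "git status --short"
        output = execute_command("git", ["status", "--short"])
        print(output)
    except ExecutionException as e:
        log.error(str(e))
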
def install_compose() -> None:
    cli_plugin = Path.home().joinpath(".docker", "cli-plugins")
    cli_plugin.mkdir(parents=True, exist_ok=True)
    compose_bin = cli_plugin.joinpath("docker-compose")

    url = "https://github.com/docker/compose/releases/download/"
    url += f"{COMPOSE_VERSION}/docker-compose-linux-x86_64"

    log.info("Downloading compose binary: {}", url)
    f = Packages.download(url, EXPECTED_COMPOSE_BIN_MD5)
    f.rename(compose_bin)
    compose_bin.chmod(compose_bin.stat().st_mode | stat.S_IEXEC)

    if docker.compose.is_installed():
        log.info("Docker compose is installed")
    else:  # pragma: no cover
        log.error("Docker compose is NOT installed")
