def get_configuration_dir_state(self) -> ConfigurationDirState:
    """Determine the current state of the configuration directory.

    Used to validate whether a given command is allowed to run.

    Returns:
        ConfigurationDirState: The state of the configuration.
    """
    try:
        generated_dir = self.get_generated_configuration_dir()
    except AttributeError:
        # When configuration_dir is None (e.g. during an 'install'),
        # get_generated_configuration_dir raises AttributeError. The
        # generated dir cannot be determined, so treat it as absent.
        generated_dir = None

    state: ConfigurationDirState = ConfigurationDirStateFactory.get_state(
        self.configuration_dir,
        generated_dir,
        self.app_version,
        self.backup_dir,
    )
    logger.debug(f"Derived configuration state [{state}]")
    return state
def commit_changes(self, message: str) -> bool:
    """Commit any outstanding changes to the git repository.

    Args:
        message (str): The commit message to use

    Returns:
        bool: True if a commit was made, otherwise False.
    """
    working_dir = self.repo.working_dir

    # Only commit when the working tree (including untracked files) is dirty.
    if self.repo.is_dirty(untracked_files=True):
        logger.debug(
            "Changes found in repository [%s], making new commit.",
            working_dir,
        )
        # Stage the .gitignore explicitly, then everything else.
        self.repo.git.add(".gitignore")
        self.repo.git.add("*")
        self.repo.index.commit(message, author=self.actor)
        return True

    logger.debug(
        "No changes found in repository [%s], no commit was made.",
        working_dir,
    )
    return False
def __shutdown(self, ctx: Context, service_name: str = None):
    """Shutdown service(s) using the orchestrator.

    Runs the pre/post shutdown hooks around the orchestrator call and exits
    the process with the orchestrator's return code.

    Args:
        ctx (Context): Click Context for current CLI.
        service_name (str, optional): The name of the service to shutdown. If not provided, will shut down all services.
    """
    cli_context: CliContext = ctx.obj
    # Abort early if the current configuration state does not allow shutdown.
    cli_context.get_configuration_dir_state().verify_command_allowed(
        AppcliCommand.SERVICE_SHUTDOWN)
    hooks = self.cli_configuration.hooks
    logger.debug("Running pre-shutdown hook")
    hooks.pre_shutdown(ctx)
    logger.info("Shutting down %s ...", self.cli_configuration.app_name)
    result = self.orchestrator.shutdown(ctx.obj, service_name)
    logger.debug("Running post-shutdown hook")
    hooks.post_shutdown(ctx, result)
    logger.info("Shutdown command finished with code [%i]", result.returncode)
    # Propagate the orchestrator's exit code to the calling shell.
    sys.exit(result.returncode)
def check_environment_variable_defined(env_variables: Iterable[str], error_message_template: str, exit_message: str) -> bool:
    """Check whether all of the given environment variables are defined.

    Args:
        env_variables (Iterable[str]): the environment variables to check
        error_message_template (str): a template for the error message
        exit_message (str): the exit message on error

    Returns:
        [bool]: True if all environment variables are defined, otherwise False.
    """
    all_defined = True
    for env_variable in env_variables:
        value = os.environ.get(env_variable)
        if value is None:
            # The template receives the variable name twice (two placeholders).
            logger.error(error_message_template, env_variable, env_variable)
            all_defined = False
            continue
        logger.debug(
            f"Confirmed environment variable is set - '{env_variable}' = '{value}'"
        )

    if not all_defined:
        logger.error(exit_message)
    return all_defined
def __shutdown(self, ctx: Context, force: bool = False, service_name: str = None):
    """Shutdown service(s) using the orchestrator.

    Args:
        ctx (Context): Click Context for current CLI.
        force (bool, optional): If True, forcibly shuts down service(s). Defaults to False. Passed to pre-shutdown validation.
        service_name (str, optional): The name of the service to shutdown. If not provided, will shut down all services.
    """
    hooks = self.cli_configuration.hooks
    # NOTE(review): here the pre-shutdown hook runs BEFORE validation,
    # unlike the sibling implementation which validates first — confirm
    # this ordering is intentional.
    logger.debug("Running pre-shutdown hook")
    hooks.pre_shutdown(ctx)
    cli_context: CliContext = ctx.obj
    self.__pre_shutdown_validation(cli_context, force=force)
    logger.info("Shutting down %s ...", self.cli_configuration.app_name)
    result = self.orchestrator.shutdown(ctx.obj, service_name)
    logger.debug("Running post-shutdown hook")
    hooks.post_shutdown(ctx, result)
    logger.info("Shutdown command finished with code [%i]", result.returncode)
    # Propagate the orchestrator's exit code to the calling shell.
    sys.exit(result.returncode)
def should_run(self) -> bool:
    """Check whether the backup should run today.

    The decision is based on today's date matched against the configured
    cron-style frequency.

    Returns:
        True if the frequency matches today, False if it does not.
    """
    # The stored frequency is only the last 3 fields of a cron pattern;
    # wild-card the hour/minute fields since only the date matters here.
    cron_frequency = f"* * {self.frequency}"
    try:
        job = cronex.CronExpression(cron_frequency)
    except ValueError as e:
        logger.error(
            f"Frequency for remote strategy [{self.name}] is not valid [{self.frequency}]. [{e}]"
        )
        return False

    matches_today = job.check_trigger(time.gmtime(time.time())[:5])
    if not matches_today:
        logger.debug(
            f"Backup strategy [{self.name}] will not run due to frequency [{self.frequency}] not matching today."
        )
    return matches_today
def __create_new_configuration_branch_and_files(self):
    """Create and seed a new configuration branch for the current app version.

    Re-uses the existing encryption key across branches if one exists,
    otherwise creates a new one. Seeds the configuration directory, commits
    the result, and tags the commit with the application version.
    """
    app_version: str = self.cli_context.app_version
    app_version_branch: str = self.config_repo.generate_branch_name(app_version)

    # Read any existing key BEFORE switching branches, so it can be carried over.
    path_to_key_file = self.cli_context.get_key_file()
    key_file_contents = None
    if path_to_key_file.exists():
        key_file_contents = path_to_key_file.read_bytes()

    # Create a new branch for this current application version
    self.config_repo.checkout_new_branch_from_master(app_version_branch)

    # If the keyfile already exists, re-use it across branches. Otherwise create a new keyfile.
    if key_file_contents:
        logger.debug("Found existing key. Copied to new configuration branch")
        path_to_key_file.write_bytes(key_file_contents)
    else:
        logger.debug("No key found. Creating new key file")
        crypto.create_and_save_key(path_to_key_file)

    # Seed the configuration directory
    self.__seed_configuration_dir()

    # Commit the changes, and tag as $VERSION
    self.config_repo.commit_changes(
        f"Default configuration at version [{app_version}]"
    )
    self.config_repo.tag_current_commit(f"{app_version}")
def rename_current_branch(self, branch_name: str):
    """Rename the currently checked-out branch.

    Args:
        branch_name (str): the new branch name
    """
    # Equivalent to `git branch -m <branch_name>` on the current branch.
    self.repo.git.branch(m=branch_name)
    logger.debug(f"Renamed branch to [{branch_name}]")
def decrypt_docker_compose_files(
    cli_context: CliContext,
    docker_compose_file_relative_path: Path,
    docker_compose_override_directory_relative_path: Path,
) -> List[Path]:
    """Decrypt docker-compose and docker-compose override files.

    Args:
        cli_context (CliContext): The current CLI context.
        docker_compose_file_relative_path (Path): The relative path to the docker-compose file. Path is relative to the generated configuration directory.
        docker_compose_override_directory_relative_path (Path): The relative path to a directory containing docker-compose override files. Path is relative to the generated configuration directory.

    Returns:
        List[Path]: sorted list of absolute paths to decrypted docker-compose files. The first path is the decrypted docker-compose file, and the rest of the paths are the alphanumerically sorted docker compose override files in the docker compose override directory.
    """
    compose_files = []

    # Resolve the main docker-compose file, if configured and present on disk.
    if docker_compose_file_relative_path is not None:
        docker_compose_file = cli_context.get_generated_configuration_dir().joinpath(
            docker_compose_file_relative_path
        )
        if os.path.isfile(docker_compose_file):
            compose_files.append(docker_compose_file)

    # Collect override files, if the override directory is configured and exists.
    if docker_compose_override_directory_relative_path is not None:
        docker_compose_override_directory = (
            cli_context.get_generated_configuration_dir().joinpath(
                docker_compose_override_directory_relative_path
            )
        )
        if os.path.isdir(docker_compose_override_directory):
            # Only plain files directly in the directory are considered overrides.
            docker_compose_override_files: List[Path] = [
                Path(os.path.join(docker_compose_override_directory, file))
                for file in os.listdir(docker_compose_override_directory)
                if os.path.isfile(os.path.join(docker_compose_override_directory, file))
            ]
            if len(docker_compose_override_files) > 0:
                # Sort so overrides are applied in deterministic (alphanumeric) order.
                docker_compose_override_files.sort()
                logger.debug(
                    "Detected docker compose override files [%s]",
                    docker_compose_override_files,
                )
                compose_files.extend(docker_compose_override_files)

    # decrypt files if key is available
    key_file = cli_context.get_key_file()
    decrypted_files = [
        decrypt_file(encrypted_file, key_file) for encrypted_file in compose_files
    ]
    return decrypted_files
def __save(self, variables: Dict):
    """Write the supplied variables out to the configuration file.

    Args:
        variables (Dict): the variables to save
    """
    target_path = self.configuration_file.absolute().as_posix()
    logger.debug(f"Saving configuration to [{target_path}] ...")
    # self.yaml is the YAML serialiser configured elsewhere on this class.
    with open(target_path, "w") as config_file:
        self.yaml.dump(variables, config_file)
def __generate_configuration_metadata_file(self):
    """Write the generated-configuration metadata record file.

    The record captures when the configuration was generated and the config
    repository commit it was generated from, serialised as JSON. Any
    existing metadata file is overwritten.
    """
    record = {
        # datetime.utcnow() is deprecated and returns a naive datetime;
        # datetime.now(timezone.utc) yields the identical aware ISO-8601
        # string (e.g. '...+00:00') without the deprecation.
        "generated_at": datetime.now(timezone.utc).isoformat(),
        "generated_from_commit": self.config_repo.get_current_commit_hash(),
    }
    configuration_record_file = self.__get_generated_configuration_metadata_file(
        self.cli_context
    )
    # Overwrite the existing generated configuration metadata record file
    configuration_record_file.write_text(
        json.dumps(record, indent=2, sort_keys=True)
    )
    logger.debug("Configuration record written to [%s]", configuration_record_file)
def _action_orchestrator(
    self,
    ctx: Context,
    action: ServiceAction,
    service_names: tuple[str, ...] = None,
):
    """Applies an action to service(s).

    Args:
        ctx (Context): Click Context for current CLI.
        action (ServiceAction): action to apply to service(s), ie start, stop ...
        service_names (tuple[str, ...], optional): The name(s) of the service(s) to effect. If not provided the action applies to all services.
    """
    hooks = self.cli_configuration.hooks
    # Map the requested action onto the orchestrator call and its hook pair.
    if action == ServiceAction.START:
        action_run_function = self.orchestrator.start
        pre_hook = hooks.pre_start
        post_hook = hooks.post_start
    elif action == ServiceAction.SHUTDOWN:
        action_run_function = self.orchestrator.shutdown
        pre_hook = hooks.pre_shutdown
        post_hook = hooks.post_shutdown
    else:
        # error_and_exit is expected to terminate the process here, so the
        # names above are always bound when execution continues past this
        # point — TODO confirm error_and_exit never returns.
        error_and_exit(f"Unhandled action called: [{action.name}]")

    # Human-readable log lines: list the targeted services, or the app name
    # when the action applies to all services.
    pre_run_log_message = (
        f"{action.name} "
        + (
            ", ".join(service_names)
            if service_names is not None and len(service_names) > 0
            else self.cli_configuration.app_name
        )
        + " ..."
    )
    post_run_log_message = f"{action.name} command finished with code [%i]"

    logger.debug(f"Running pre-{action.name} hook")
    pre_hook(ctx)

    logger.info(pre_run_log_message)
    result = action_run_function(ctx.obj, service_names)

    logger.debug(f"Running post-{action.name} hook")
    post_hook(ctx, result)

    logger.info(post_run_log_message, result.returncode)
    # Propagate the orchestrator's exit code to the calling shell.
    sys.exit(result.returncode)
def execute_compose(
    cli_context: CliContext,
    command: Iterable[str],
    docker_compose_file_relative_path: Path,
    docker_compose_override_directory_relative_path: Path,
) -> CompletedProcess:
    """Builds and executes a docker-compose command.

    Args:
        cli_context (CliContext): The current CLI context.
        command (Iterable[str]): The command to execute with docker-compose.
        docker_compose_file_relative_path (Path): The relative path to the docker-compose file. Path is relative to the generated configuration directory.
        docker_compose_override_directory_relative_path (Path): The relative path to a directory containing docker-compose override files. Path is relative to the generated configuration directory.

    Returns:
        CompletedProcess: The completed process and its exit code.
    """
    docker_compose_command = [
        "docker-compose",
        "--project-name",
        cli_context.get_project_name(),
    ]

    # Resolve (and decrypt, where a key exists) the compose + override files.
    compose_files = decrypt_docker_compose_files(
        cli_context,
        docker_compose_file_relative_path,
        docker_compose_override_directory_relative_path,
    )

    # Without at least one compose file there is nothing to run; report
    # failure via a synthetic CompletedProcess rather than raising.
    if len(compose_files) == 0:
        logger.error(
            "No valid docker compose files were found. Expected file [%s] or files in directory [%s]",
            docker_compose_file_relative_path,
            docker_compose_override_directory_relative_path,
        )
        return CompletedProcess(args=None, returncode=1)

    # Each compose file is passed with its own --file flag, in order.
    for compose_file in compose_files:
        docker_compose_command.extend(("--file", str(compose_file)))
    if command is not None:
        docker_compose_command.extend(command)
    logger.debug("Running [%s]", " ".join(docker_compose_command))
    result = run(docker_compose_command)
    return result
def __regenerate_generated_configuration(self):
    """Generate the generated configuration files.

    Templates are applied in order of increasing specificity (baseline,
    baseline overrides, configurable), then selected files are decrypted,
    the applied settings are copied in as a record, a metadata file is
    written, and finally the generated directory is placed under version
    control.
    """
    print_header("Generating configuration files")
    # Back up any previous generated config and start a fresh directory.
    generated_configuration_dir = self.__backup_and_create_new_generated_config_dir(
        self.config_repo.get_repository_version()
    )

    # Apply template layers in order; later layers can overwrite earlier ones.
    logger.info("Generating configuration from default templates")
    self.__apply_templates_from_directory(
        self.cli_configuration.baseline_templates_dir, generated_configuration_dir
    )

    logger.info("Generating configuration from override templates")
    self.__apply_templates_from_directory(
        self.cli_context.get_baseline_template_overrides_dir(),
        generated_configuration_dir,
    )

    logger.info("Generating configuration from configurable templates")
    self.__apply_templates_from_directory(
        self.cli_context.get_configurable_templates_dir(),
        generated_configuration_dir,
    )

    # Decrypt any generated files the configuration says must be plaintext.
    files_to_decrypt = self.cli_configuration.decrypt_generated_files
    if len(files_to_decrypt) > 0:
        self.__decrypt_generated_files(
            self.cli_context.get_key_file(),
            self.cli_context.get_generated_configuration_dir(),
            files_to_decrypt,
        )

    # Copy the settings file that was used to generate the templates
    self.__copy_settings_files_to_generated_dir()

    # Generate the metadata file
    self.__generate_configuration_metadata_file()

    # By re-instantiating the 'GeneratedConfigurationGitRepository', we put
    # the generated config repo under version control.
    generated_config_repo = GeneratedConfigurationGitRepository(
        self.cli_context.get_generated_configuration_dir()
    )
    logger.debug(
        f"Generated configuration at [{generated_config_repo.get_repo_path()}]"
    )
def set(ctx: Context, type: str, encrypted: bool, setting: str, value: str = None):
    """Set a configuration value, with specified type, and optional encryption.

    If the 'value' isn't passed in, then the user will be prompted. This is useful in the
    case where the value is sensitive and shouldn't be captured in terminal history.

    Note - appcli does not currently support encrypting non-string-typed values.

    Args:
        ctx (Context): Click Context for current CLI.
        type (str): Transform the input value as type
        encrypted (Bool, flag): flag to indicate if value should be encrypted
        setting (str): setting to set
        value (str, optional): value to assign to setting
    """
    # NOTE(review): this function has no `self` parameter yet references
    # `self.cli_configuration` below — it appears to be defined inside a
    # method and relies on `self` from the enclosing closure; confirm.
    cli_context: CliContext = ctx.obj
    cli_context.get_configuration_dir_state().verify_command_allowed(
        AppcliCommand.CONFIGURE_SET)

    # Check if value was not provided
    if value is None:
        value = click.prompt("Please enter a value", type=str)

    # Transform input value as type
    transformed_value = StringTransformer.transform(value, type)

    # We don't support encrypting non-string-typed values yet, so error and exit.
    if encrypted and not isinstance(transformed_value, str):
        error_and_exit(
            "Cannot encrypt a non-string-typed value. Exiting without setting value."
        )

    # Set settings value
    final_value = (encrypt_text(cli_context, transformed_value)
                   if encrypted else transformed_value)
    configuration = ConfigurationManager(cli_context, self.cli_configuration)
    configuration.set_variable(setting, final_value)
    # Never log the plaintext of an encrypted value.
    logger.debug(
        f"Successfully set variable [{setting}] to [{'### Encrypted Value ###' if encrypted else value}]."
    )
def restore(self, ctx, backup_filename: Path):
    """Restore application data and configuration from the provided local backup `.tgz` file.

    This will create a backup of the existing data and config, remove the contents `conf`, `data` and
    `conf/.generated` and then extract the backup to the appropriate locations.
    `conf`, `data` and `conf/.generated` are mapped into appcli which means we keep the folder
    but replace their contents on restore.

    Args:
        backup_filename (string): The name of the file to use in restoring data. The path of the file will be pulled from `CliContext.obj.backup_dir`.
    """
    cli_context: CliContext = ctx.obj
    logger.info(
        f"Initiating system restore with backup [{backup_filename}]")

    # Check that the backup file exists.
    backup_dir: Path = cli_context.backup_dir
    backup_name: Path = Path(os.path.join(backup_dir, backup_filename))
    if not backup_name.is_file():
        error_and_exit(f"Backup file [{backup_name}] not found.")

    # Perform a backup of the existing application config and data.
    logger.debug("Backup existing application data and configuration")
    restore_backup_name = self.backup(
        ctx, allow_rolling_deletion=False
    )  # False ensures we don't accidentally delete our backup
    logger.debug(
        f"Backup(s) complete. Generated backups: [{restore_backup_name}]")

    # Extract conf and data directories from the tar.
    # This will overwrite the contents of each directory, anything not in the backup (such as files matching the
    # exclude glob patterns) will be left alone.
    # NOTE(review): extraction is limited via the self.__members filter;
    # confirm it also guards against path traversal within the archive.
    try:
        with tarfile.open(backup_name) as tar:
            conf_dir: Path = cli_context.configuration_dir
            tar.extractall(conf_dir, members=self.__members(
                tar, os.path.basename(conf_dir)))
            data_dir: Path = cli_context.data_dir
            tar.extractall(data_dir, members=self.__members(
                tar, os.path.basename(data_dir)))
    except Exception as e:
        # Best-effort: log the failure but still report completion below.
        logger.error(f"Failed to extract backup. Reason: {e}")

    logger.info("Restore complete.")
def install(ctx, install_dir: Path):
    """Generate an installer script for the application and print it to stdout.

    Missing conf/data/backup directories on the CLI context are defaulted to
    sub-directories of the environment-specific install directory.

    Args:
        ctx: Click Context for current CLI.
        install_dir (Path): Base directory to install into; the environment
            name is appended as a sub-directory.
    """
    cli_context: CliContext = ctx.obj
    cli_context.get_configuration_dir_state().verify_command_allowed(
        AppcliCommand.INSTALL)
    logger.info("Generating installer script ...")

    # Get the template from the appcli package
    launcher_template = pkg_resources.read_text(
        templates, INSTALLER_TEMPLATE_FILENAME)
    logger.debug(f"Read template file [{INSTALLER_TEMPLATE_FILENAME}]")

    environment: str = cli_context.environment
    target_install_dir: Path = install_dir / environment

    # Default any unset directories relative to the target install dir.
    # CliContext is immutable, so _replace returns an updated copy.
    if cli_context.configuration_dir is None:
        cli_context = cli_context._replace(
            configuration_dir=target_install_dir / "conf")
    if cli_context.data_dir is None:
        cli_context = cli_context._replace(
            data_dir=target_install_dir / "data")
    if cli_context.backup_dir is None:
        cli_context = cli_context._replace(
            backup_dir=target_install_dir / "backup")

    render_variables = {
        "cli_context": cli_context,
        "configuration": self.configuration,
        "install_dir": f"{target_install_dir}",
    }

    logger.debug(
        f"Rendering template with render variables: [{render_variables}]"
    )
    # StrictUndefined makes missing settings raise instead of rendering blank.
    template = Template(
        launcher_template,
        undefined=StrictUndefined,
        trim_blocks=True,
        lstrip_blocks=True,
    )
    try:
        output_text = template.render(render_variables)
        print(output_text)
    except Exception as e:
        error_and_exit(
            f"Could not generate file from template. The configuration file is likely missing a setting: {e}"
        )
def configure_default(self, app_name):
    """Applies the default opinionated configuration to Keycloak

    This does the following:
    - Creates a realm named '<app_name>'
    - For realm '<app_name>', creates a client with the name '<app_name>', which has an audience mapper to itself, and redirect URIs of ["*"]
    - For realm '<app_name>', creates a realm role '<app_name>-admin'
    - For realm '<app_name>', creates a user 'test.user' with password 'password', and assigns the realm role '<app_name>-admin'
    """
    self.create_realm(app_name)
    logger.debug(f"Created realm [{app_name}]")

    # Client with an audience mapper pointing back at itself, and
    # wildcard redirect URIs.
    client_payload = {
        "redirectUris": ["*"],
        "protocolMappers": [{
            "name": f"{app_name}-audience",
            "protocol": "openid-connect",
            "protocolMapper": "oidc-audience-mapper",
            # NOTE: Keycloak expects these config values as strings,
            # hence "false"/"true" rather than booleans.
            "consentRequired": "false",
            "config": {
                "included.client.audience": app_name,
                "id.token.claim": "false",
                "access.token.claim": "true",
            },
        }],
    }
    self.create_client(app_name, app_name, client_payload)
    secret = self.get_client_secret(app_name, app_name)
    logger.debug(f"Created client [{app_name}] with secret [{secret}]")

    realm_role = f"{app_name}-admin"
    self.create_realm_role(app_name, realm_role)
    logger.debug(f"Created realm role [{realm_role}]")

    username = "******"
    self.create_user(app_name, username, "password",
                     "Test", "User", "*****@*****.**")
    logger.debug(
        f"Created user [test.user] with password [password] in realm [{app_name}]"
    )
    self.assign_realm_role(app_name, username, realm_role)
    logger.debug(f"Assigned realm role [{realm_role}] to user [test.user]")
def encrypt_text(cli_context, text: str):
    """Encrypt text using the application key file.

    Creates the key file on first use if it does not yet exist.

    Args:
        cli_context (CliContext): the cli context
        text (str): the string to encrypt

    Raises:
        ValueError: if `text` is None.
    """
    if text is None:
        raise ValueError("Text to encrypt cannot be 'None'")

    key_file: Path = cli_context.get_key_file()
    # Lazily create the encryption key the first time it is needed.
    if not key_file.is_file():
        logger.debug("Creating encryption key at [%s]", key_file)
        crypto.create_and_save_key(key_file)

    return Cipher(key_file).encrypt(text)
def verify_service_names(
    self, cli_context: CliContext, service_names: tuple[str, ...]
) -> bool:
    """Verify that the supplied service names exist in the docker stack.

    Args:
        cli_context (CliContext): the current CLI context.
        service_names (tuple[str, ...]): service names to verify. None or an
            empty tuple is trivially valid.

    Returns:
        bool: True if all supplied names are valid services, otherwise False.
    """
    # Nothing supplied means "all services" — trivially valid.
    if service_names is None or len(service_names) == 0:
        return True
    subcommand = ["config", "--services"]
    result = self.__docker_stack(cli_context, subcommand)
    if result.returncode != 0:
        error_msg = result.stderr.decode()
        # Fixed typo in user-visible message: 'occured' -> 'occurred'.
        logger.error(
            f"An unexpected error occurred while verifying services. Error: {error_msg}"
        )
        return False

    # Converts the byte type into list of names, and removes trailing empty string
    valid_service_names = result.stdout.decode().split("\n")[:-1]
    logger.debug("Valid Services: %s", ", ".join(valid_service_names))
    return service_name_verifier(service_names, valid_service_names)
def __decrypt_generated_files(
    self, key_file: Path, generated_config_dir: Path, files: Iterable[str]
):
    """Decrypt the specified generated configuration files in place.

    Each encrypted file is overwritten with its decrypted contents.

    Args:
        key_file (Path): Key file to use when decrypting.
        generated_config_dir (Path): Path to the generated configuration directory.
        files (Iterable[str]): Relative paths to the files to decrypt, resolved against the generated configuration directory.
    """
    for relative_file in files:
        target = generated_config_dir.joinpath(relative_file)
        logger.debug("Decrypting [%s] ...", target)
        # Source and destination are the same path — decrypt in place.
        decrypt_values_in_file(target, target, key_file)
def start(ctx, force, service_name):
    """Start service(s) via the orchestrator, running pre/post-start hooks.

    Exits the process with the orchestrator's return code.

    Args:
        ctx: Click Context for current CLI.
        force: passed to command-allowed verification to override state checks.
        service_name: service to start; None starts all services.
    """
    # NOTE(review): `self` and `configuration` are not parameters here —
    # this function appears to rely on an enclosing closure scope; confirm.
    cli_context: CliContext = ctx.obj
    cli_context.get_configuration_dir_state().verify_command_allowed(
        AppcliCommand.SERVICE_START, force)
    hooks = self.cli_configuration.hooks
    logger.debug("Running pre-start hook")
    hooks.pre_start(ctx)
    logger.info("Starting %s ...", configuration.app_name)
    result = self.orchestrator.start(ctx.obj, service_name)
    logger.debug("Running post-start hook")
    hooks.post_start(ctx, result)
    logger.info("Start command finished with code [%i]", result.returncode)
    sys.exit(result.returncode)
def launcher(ctx):
    """Generate the launcher script and print it to stdout.

    Args:
        ctx: Click Context for current CLI.
    """
    cli_context: CliContext = ctx.obj
    cli_context.get_configuration_dir_state().verify_command_allowed(
        AppcliCommand.LAUNCHER)
    logger.info("Generating launcher script ...")

    # Get the template from the appcli package
    launcher_template = pkg_resources.read_text(
        templates, LAUNCHER_TEMPLATE_FILENAME)
    logger.debug(f"Read template file [{LAUNCHER_TEMPLATE_FILENAME}]")

    render_variables = {
        # Fall back to 'latest' when APP_VERSION is not set in the environment.
        "app_version": os.environ.get("APP_VERSION", "latest"),
        "app_name": configuration.app_name.upper(),
        "cli_context": ctx.obj,
        "configuration": self.configuration,
        "current_datetime": f"{datetime.datetime.utcnow().isoformat()}+00:00",  # Since we're using utcnow(), we specify the offset manually
    }

    logger.debug(
        f"Rendering template with render variables: [{render_variables}]"
    )
    # StrictUndefined makes missing settings raise instead of rendering blank.
    template = Template(
        launcher_template,
        undefined=StrictUndefined,
        trim_blocks=True,
        lstrip_blocks=True,
    )
    try:
        output_text = template.render(render_variables)
        print(output_text)
    except Exception as e:
        error_and_exit(
            f"Could not generate file from template. The configuration file is likely missing a setting: {e}"
        )
def start(ctx, force, service_name): hooks = self.cli_configuration.hooks # TODO: run self.cli_configuration.hooks.is_valid_variables() to confirm variables are valid logger.debug("Running pre-start hook") hooks.pre_start(ctx) cli_context: CliContext = ctx.obj self.__pre_start_validation(cli_context, force=force) logger.info("Starting %s ...", configuration.app_name) result = self.orchestrator.start(ctx.obj, service_name) logger.debug("Running post-start hook") hooks.post_start(ctx, result) logger.info("Start command finished with code [%i]", result.returncode) sys.exit(result.returncode)
def __rolling_backup_deletion(self, backup_dir: Path):
    """Delete old backups, keeping only the most recent `backup_limit` files.

    Backup age is derived purely from the alphanumeric ordering of the
    filenames (they embed a sortable datetime stamp), so any stray files in
    the backup directory could be deleted unintentionally.

    Args:
        backup_dir (Path): The directory that contains the backups.
    """
    # A limit of 0 means "never delete any backups".
    if self.backup_limit == 0:
        logger.debug("Backup limit is 0 - skipping rolling deletion.")
        return

    logger.info(
        f"Removing old backups - retaining at least the last [{self.backup_limit}] backups ..."
    )

    # Newest first. Assumes every file in the directory is a backup named
    # with the time-sortable convention.
    ordered_backups = sorted(os.listdir(backup_dir), reverse=True)

    # Everything beyond the first `backup_limit` entries is stale.
    for stale_backup in ordered_backups[self.backup_limit:]:
        backup_file: Path = Path(os.path.join(backup_dir, stale_backup))
        logger.info(f"Deleting backup file [{backup_file}]")
        os.remove(backup_file)
def decrypt_file(encrypted_file: Path, key_file: Path) -> Path:
    """Decrypt a file using the supplied key.

    If no key file exists, the original (still-encrypted) path is returned
    unchanged.

    Args:
        encrypted_file (Path): File to decrypt.
        key_file (Path): Key to use for decryption.

    Returns:
        Path: Path to the decrypted file.
    """
    if not key_file.is_file():
        logger.info(
            "No decryption key found. [%s] will not be decrypted.", encrypted_file
        )
        return encrypted_file

    logger.debug("Decrypting file [%s] using [%s].", str(encrypted_file), key_file)
    # Decrypt into a persistent temp file (delete=False) so callers can
    # read it after this function returns.
    decrypted_file: Path = Path(NamedTemporaryFile(delete=False).name)
    crypto.decrypt_values_in_file(encrypted_file, decrypted_file, key_file)
    return decrypted_file
def __initialise_git_repo(self):
    """Initialise the git repository, create .gitignore if required, and commit the initial files.

    Returns:
        git.Repo: The newly-created git repository.
    """
    logger.debug("Initialising repository at [%s] ...", self.repo_path)

    # git init, and write to the .gitignore file
    repo = git.Repo.init(self.repo_path)
    logger.debug("Initialised repository at [%s]", repo.working_dir)

    gitignore_path = self.repo_path.joinpath(".gitignore")
    with open(gitignore_path, "w") as ignore_file:
        for ignore in self.ignores:
            ignore_file.write(f"{ignore}\n")
    # Fixed: the original mixed an f-string with a lazy '%s' placeholder and
    # argument; use consistent lazy %-formatting so both values are
    # substituted by the logging framework (rendered output is unchanged).
    logger.debug(
        "Created .gitignore at [%s] with ignores: [%s]", gitignore_path, self.ignores
    )

    repo.git.add(".gitignore")
    repo.git.add("*")
    repo.index.commit("[autocommit] Initialised repository", author=self.actor)
    return repo
def __copy_settings_files_to_generated_dir(self):
    """Copy the current settings file and encryption key to the generated directory
    as a record of what configuration was used to generate those files.
    """
    generated_config_dir = self.cli_context.get_generated_configuration_dir()
    settings_file = self.cli_context.get_app_configuration_file()
    key_file = self.cli_context.get_key_file()

    logger.debug(
        "Copying applied settings file to generated configuration directory"
    )
    applied_configuration_file = generated_config_dir.joinpath(settings_file.name)
    # copy2 preserves file metadata (timestamps) alongside contents.
    shutil.copy2(settings_file, applied_configuration_file)

    logger.debug("Copying applied key file to generated configuration directory")
    applied_key_file = generated_config_dir.joinpath(key_file.name)
    shutil.copy2(key_file, applied_key_file)

    logger.debug(
        "Applied settings and key file written to [%s] and [%s]",
        applied_configuration_file,
        applied_key_file,
    )
def __apply_templates_from_directory(
    self, template_path: Path, generated_configuration_dir: Path
):
    """Render or copy all templates from a source directory into the generated directory.

    `.j2` files are rendered against the current variables (with the `.j2`
    suffix stripped from the output name); all other files are copied
    verbatim. The directory structure is mirrored.

    Args:
        template_path (Path): directory to the templates
        generated_configuration_dir (Path): directory to output generated files
    """
    for template_file in template_path.glob("**/*"):
        relative_file = template_file.relative_to(template_path)
        target_file = generated_configuration_dir.joinpath(relative_file)

        if template_file.is_dir():
            # Mirror sub-directories as-is.
            logger.debug("Creating directory [%s] ...", target_file)
            target_file.mkdir(parents=True, exist_ok=True)
        elif template_file.suffix == ".j2":
            # Jinja2 template: render against configuration, dropping '.j2'.
            target_file = target_file.with_suffix("")
            logger.debug("Generating configuration file [%s] ...", target_file)
            self.__generate_from_template(
                template_file,
                target_file,
                self.__get_variables_manager().get_all_variables(),
            )
        else:
            logger.debug("Copying configuration file to [%s] ...", target_file)
            shutil.copy2(template_file, target_file)
def __determine_file_list_from_glob(self, path_to_backup: Path, globs: GlobList) -> Set[Path]: """ Determine the list of files to backup in the path provided based on the include/exclude lists in the provided GlobList Args: path_to_backup: (Path). The path to use when generating the list of files. globs: (GlobList). A GlobList which contains the include/exclude list used to filter the files found in the path. Returns: set(Path): A set of files that need to be backed up. """ # Get a set of files that should be included in the backup. included_globbed_files: set(Path) = self.__get_files_from_globs( path_to_backup, globs.include_list) logger.debug( f"Included files, glob: [{globs.include_list}], path: [{path_to_backup}], included files: [{included_globbed_files}]" ) # Get a set of files that should be excluded from the backup. excluded_globbed_files: set(Path) = self.__get_files_from_globs( path_to_backup, globs.exclude_list) logger.debug( f"Excluded files, glob: [{globs.exclude_list}], path: [{path_to_backup}], excluded files: [{excluded_globbed_files}]" ) # Determine the files that need to be backed up by removing the exclude set from the include set. filtered_files: set( Path) = included_globbed_files - excluded_globbed_files logger.debug(f"Final set of files to include: [{filtered_files}]") return filtered_files