def _process_configuration(self, project_and_group: str, configuration: dict):
    """Protect or unprotect every tag listed under configuration["tags"].

    In strict mode a missing tag aborts the run; otherwise it only warns.
    """
    for tag in sorted(configuration["tags"]):
        try:
            if configuration["tags"][tag]["protected"]:
                # optional key restricting who may create the tag; None lets
                # the GitLab API use its default
                create_access_level = (
                    configuration["tags"][tag]["create_access_level"]
                    if "create_access_level" in configuration["tags"][tag]
                    else None)
                debug("Setting tag '%s' as *protected*", tag)
                try:
                    # try to unprotect first
                    self.gitlab.unprotect_tag(project_and_group, tag)
                except NotFoundException:
                    pass
                self.gitlab.protect_tag(project_and_group, tag, create_access_level)
            else:
                debug("Setting tag '%s' as *unprotected*", tag)
                self.gitlab.unprotect_tag(project_and_group, tag)
        except NotFoundException:
            message = f"Tag '{tag}' not found when trying to set it as protected/unprotected!"
            if self.strict:
                fatal(
                    message,
                    exit_code=EXIT_PROCESSING_ERROR,
                )
            else:
                warning(message)
def _validate_required_to_delete(self, project_or_group: str, entity_name: str, entity_dict: dict):
    """Exit with an error when entity_dict lacks the defining keys needed to delete it."""
    if self.defining.contains(entity_dict):
        return
    message = (
        f"Entity {entity_name} in {self.configuration_name} for {project_or_group}"
        f" doesn't have some of its defining keys required to delete it: {self.defining.explain()}"
    )
    fatal(message, exit_code=EXIT_INVALID_INPUT)
def check_shallow_with_sha1(self, repo: tsrc.Repo) -> None:
    """Fail when a shallow clone is requested for a repo pinned to a sha1."""
    if not repo.sha1:
        return
    if not self.shallow:
        return
    template = textwrap.dedent(
        "Cannot use --shallow with a fixed sha1 ({repo.sha1})\n"
        "Consider using a tag instead")
    ui.fatal(template.format(repo=repo))
def _verify_if_groups_exist(self, groups: list):
    """Abort with EXIT_INVALID_INPUT if any configured group does not exist in GitLab."""
    for group in groups:
        try:
            # lookup ignores case, matching GitLab's own naming semantics
            self.gitlab.get_group_case_insensitive(group)
        except NotFoundException:
            message = f"Configuration contains group {group} but it cannot be found in GitLab!"
            fatal(message, exit_code=EXIT_INVALID_INPUT)
def init(working_path: Path, *, current_version: str) -> None:
    """
    Interactively creates a new tbump.toml
    """
    ui.info_1("Generating tbump config file")
    tbump_path = working_path / "tbump.toml"
    # never clobber an existing config
    if tbump_path.exists():
        ui.fatal(tbump_path, "already exists")
    # base config: current version, version regex, and git templates;
    # @current_version@ is substituted below
    template = textwrap.dedent("""\
        # Uncomment this if your project is hosted on GitHub:
        # github_url = https://github.com/<user or organization>/<project>/

        [version]
        current = "@current_version@"

        # Example of a semver regexp.
        # Make sure this matches current_version before
        # using tbump
        regex = '''
          (?P<major>\\d+)
          \\.
          (?P<minor>\\d+)
          \\.
          (?P<patch>\\d+)
          '''

        [git]
        message_template = "Bump to {new_version}"
        tag_template = "v{new_version}"
        """)
    # placeholder [[file]] section for the user to fill in
    file_template = textwrap.dedent("""
        # For each file to patch, add a [[file]] config section containing
        # the path of the file, relative to the tbump.toml location.
        [[file]]
        src = "..."
        """)
    # commented-out hook examples
    hooks_template = textwrap.dedent("""
        # You can specify a list of commands to
        # run after the files have been patched
        # and before the git commit is made

        #  [[before_commit]]
        #  name = "check changelog"
        #  cmd = "grep -q {new_version} Changelog.rst"

        # Or run some commands after the git tag and the branch
        # have been pushed:
        #  [[after_push]]
        #  name = "publish"
        #  cmd = "./publish.sh"
        """)
    to_write = template.replace("@current_version@", current_version)
    to_write += file_template
    to_write += hooks_template
    tbump_path.write_text(to_write)
    ui.info_2(ui.check, "Generated tbump.toml")
def __init__(self, configuration, group_processors, project_processors):
    """Store collaborators; reject configs without a non-empty 'projects_and_groups' key."""
    self.configuration = configuration
    self.group_processors = group_processors
    self.project_processors = project_processors
    projects_and_groups = self.configuration.get("projects_and_groups", {})
    if not projects_and_groups:
        fatal(
            "Configuration has to contain non-empty 'projects_and_groups' key.",
            exit_code=EXIT_INVALID_INPUT,
        )
def _validate_required_to_create_or_update(self, project_or_group: str, entity_name: str, entity_dict: dict):
    """Exit with an error when entity_dict lacks keys required to create or update it."""
    if self.required_to_create_or_update.contains(entity_dict):
        return
    message = (
        f"Entity {entity_name} in {self.configuration_name} for {project_or_group}"
        f" doesn't have some of its keys required to create or update:"
        f" {self.required_to_create_or_update.explain()}"
    )
    fatal(message, exit_code=EXIT_INVALID_INPUT)
def _find_duplicates(self, project_or_group: str, entities_in_configuration: dict):
    # Fail if two differently-named entities share the same defining keys:
    # such entries would be ambiguous when applied.
    # NOTE(review): every ordered pair is visited, so each couple is compared
    # twice (O(n^2) with redundancy). Presumably matches() is symmetric, which
    # would allow itertools.combinations() instead - confirm before changing.
    for first_key, first_value in entities_in_configuration.items():
        for second_key, second_value in entities_in_configuration.items():
            if first_key != second_key:
                if self.defining.matches(first_value, second_value):
                    fatal(
                        f"Entities {first_key} and {second_key} in {self.configuration_name} for {project_or_group}"
                        f" are the same in terms of their defining keys: {self.defining.explain()}",
                        exit_code=EXIT_INVALID_INPUT,
                    )
def write_to_file(self):
    """Serialize the accumulated config to YAML and write+close the output file, if any."""
    if not self.output_file:
        return
    try:
        serialized = ez_yaml.to_string(self.config)
        self.output_file.write(serialized)
        self.output_file.close()
    except Exception as e:
        fatal(
            f"Error when trying to write or close {self.output_file}: {e}",
            exit_code=EXIT_PROCESSING_ERROR,
        )
def deploy(args: argparse.Namespace) -> None:
    """Publish the crate, but only if every expected native target was compiled."""
    # each compiled target has its own directory under native/
    compiled_targets = [p.name for p in Path("native").iterdir() if p.is_dir()]
    missing_targets = [
        target for target in TARGET_LIST if target not in compiled_targets
    ]
    if missing_targets:
        ui.fatal("Aborting deploy because of missing targets:", *missing_targets)
    version = args.version
    registry = args.registry
    tankerci.bump_files(version)
    # --allow-dirty: bump_files just modified the working tree on purpose
    tankerci.run("cargo", "publish", "--allow-dirty", f"--registry={registry}")
def unprotect_branch(self, project_and_group, branch):
    """Remove protection from a branch; a missing branch is fatal only in strict mode."""
    try:
        debug("Setting branch '%s' as unprotected", branch)
        self.gitlab.unprotect_branch_new_api(project_and_group, branch)
    except NotFoundException:
        not_found_message = f"Branch '{branch}' not found when trying to set it as protected/unprotected!"
        if not self.strict:
            warning(not_found_message)
        else:
            fatal(not_found_message, exit_code=EXIT_PROCESSING_ERROR)
def _verify_if_projects_exist_and_get_archived_projects(
        self, projects: list) -> list:
    """Verify every configured project exists in GitLab and collect the
    'path_with_namespace' of those that are archived."""
    archived_paths = []
    for project in projects:
        try:
            found = self.gitlab.get_project_case_insensitive(project)
            if found["archived"]:
                archived_paths.append(found["path_with_namespace"])
        except NotFoundException:
            fatal(
                f"Configuration contains project {project} but it cannot be found in GitLab!",
                exit_code=EXIT_INVALID_INPUT,
            )
    return archived_paths
def __init__(self, output_file):
    """Open output_file for writing effective configs; a falsy output_file
    disables output.

    Exits with EXIT_INVALID_INPUT when the file cannot be opened.
    """
    if output_file:
        try:
            self.output_file = open(output_file, "w")
            debug(
                f"Opened file {self.output_file} to write the effective configs to."
            )
        except Exception as e:
            # Bug fix: use the output_file parameter here. When open() itself
            # fails, self.output_file was never assigned, so referencing it in
            # this message raised AttributeError and masked the real error.
            fatal(
                f"Error when trying to open {output_file} to write the effective configs to: {e}",
                exit_code=EXIT_INVALID_INPUT,
            )
    else:
        self.output_file = None
    # accumulated effective configuration, filled in later
    self.config = {}
def _process_configuration(self, project_and_group: str, configuration: dict):
    """Apply the project members config: shared groups and direct users,
    optionally enforcing that no other direct members remain."""
    enforce_members = configuration.get("members|enforce", False)
    groups = configuration.get("members|groups", {})
    users = configuration.get("members|users", {})
    nothing_requested = not (groups or users or enforce_members)
    if nothing_requested:
        fatal(
            "Project members configuration section has to contain"
            " either 'users' or 'groups' non-empty keys"
            " (unless you want to enforce no direct members).",
            exit_code=EXIT_INVALID_INPUT,
        )
    self._process_groups(project_and_group, groups, enforce_members)
    self._process_users(project_and_group, users, enforce_members)
def apply_branch_protection_configuration(
    self, project_and_group, configuration, branch
):
    """Protect or unprotect a single branch according to its config entry."""
    try:
        requested_configuration = configuration["branches"][branch]
        if requested_configuration.get("protected"):
            self.protect_branch(project_and_group, configuration, branch)
        else:
            self.unprotect_branch(project_and_group, branch)
    except NotFoundException:
        message = f"Branch '{branch}' not found when trying to set it as protected/unprotected!"
        # strict mode aborts the whole run on a missing branch
        if self.strict:
            fatal(
                message,
                exit_code=EXIT_PROCESSING_ERROR,
            )
        else:
            warning(message)
def get_branch_protection_config_type(
    self, project_and_group, requested_configuration, branch
):
    """Return "new" or "old" depending on which protection API the config keys
    target; exits with EXIT_INVALID_INPUT when neither requirement is met."""
    # for new API any keys needs to be defined...
    if any(
        key in requested_configuration
        for key in self.new_api_keys + self.extra_param_keys
    ):
        return "new"
    # ...while for the old API - *all* of them
    if all(key in requested_configuration for key in self.old_api_keys):
        return "old"
    else:
        fatal(
            f"Invalid configuration for protecting branches in project '{project_and_group}',"
            f" branch '{branch}' - missing keys.",
            exit_code=EXIT_INVALID_INPUT,
        )
def initialize_configuration_and_gitlab(self):
    """Build the GitLab client and load the configuration.

    Returns (gitlab, configuration); exits on config or connectivity errors.
    """
    try:
        if hasattr(self, "config_string"):
            # test mode: the config was passed in as a string
            gitlab = GitLab(config_string=self.config_string)
        else:
            gitlab = GitLab(config_path=self.config)
        configuration = gitlab.get_configuration()
        # rewrite symbolic access level names into numeric values, in place
        self.access_levels_transformer.transform(configuration)
        return gitlab, configuration
    except ConfigFileNotFoundException as e:
        fatal(
            f"Config file not found at: {e}",
            exit_code=EXIT_INVALID_INPUT,
        )
    except ConfigInvalidException as e:
        fatal(
            f"Invalid config:\n{e.underlying}",
            exit_code=EXIT_INVALID_INPUT,
        )
    except TestRequestFailedException as e:
        fatal(
            f"GitLab test request failed:\n{e.underlying}",
            exit_code=EXIT_PROCESSING_ERROR,
        )
def find_almost_duplicates(self):
    """Exit if any config section contains keys differing only in case."""
    # in GitLab groups and projects names are de facto case insensitive:
    # you can change the case of both name and path BUT you cannot create
    # 2 groups which names differ only with case and the same thing for
    # projects. therefore we cannot allow such entries in the config,
    # as they would be ambiguous.
    for path in [
        "projects_and_groups",
        "skip_groups",
        "skip_projects",
    ]:
        # default 0 -> skip the check when the section is absent or empty
        if self.get(path, 0):
            almost_duplicates = self._find_almost_duplicates(path)
            if almost_duplicates:
                fatal(
                    f"There are almost duplicates in the keys of {path} - they differ only in case.\n"
                    f"They are: {', '.join(almost_duplicates)}\n"
                    f"This is not allowed as we ignore the case for group and project names.",
                    exit_code=EXIT_INVALID_INPUT,
                )
def show_header(
    target,
    groups_and_projects_provider,
    non_empty_configs_provider,
):
    """Print the run header, resolve `target` into groups and projects, and
    return (effective_projects, effective_groups); exits when nothing matches."""
    if target == "ALL":
        info(">>> Getting ALL groups and projects...")
    elif target == "ALL_DEFINED":
        info(">>> Getting ALL_DEFINED groups and projects...")
    else:
        info(">>> Getting requested groups/projects...")
    groups, projects = groups_and_projects_provider.get_groups_and_projects(
        target)
    if len(groups.get_effective()) == 0 and len(projects.get_effective()) == 0:
        # tailor the error message to the kind of target requested
        if target == "ALL":
            error_message = "GitLab has no projects and groups!"
        elif target == "ALL_DEFINED":
            error_message = (
                "Configuration does not have any groups or projects defined!")
        else:
            error_message = f"Project or group {target} cannot be found in GitLab!"
        fatal(
            error_message,
            exit_code=EXIT_INVALID_INPUT,
        )
    # drop entities whose effective config is empty - nothing to do for them
    (
        groups,
        projects,
    ) = non_empty_configs_provider.omit_groups_and_projects_with_empty_configs(
        groups, projects)
    show_input_entities(groups)
    show_input_entities(projects)
    return projects.get_effective(), groups.get_effective()
def _process_configuration(self, group: str, configuration: dict):
    """Apply the group members config: shared groups and users, optionally
    enforcing that no other members remain."""
    enforce_group_members = self._is_enforce_enabled(configuration)
    (
        groups_to_set_by_group_path,
        users_to_set_by_username,
    ) = self._get_groups_and_users_to_set(configuration)
    # enforcing with an empty member list would strip all Owners, which
    # GitLab rejects - fail fast instead
    if (enforce_group_members
            and not groups_to_set_by_group_path
            and not users_to_set_by_username):
        fatal(
            "Group members configuration section has to contain"
            " some 'users' or 'groups' defined as Owners,"
            " if you want to enforce them (GitLab requires it).",
            exit_code=EXIT_INVALID_INPUT,
        )
    self._process_groups(group, groups_to_set_by_group_path, enforce_group_members)
    self._process_users(group, users_to_set_by_username, enforce_group_members)
def __init__(self, config_path=None, config_string=None):
    """Load the configuration from a file path or a literal string
    (exactly one of the two).

    Raises ConfigFileNotFoundException / ConfigInvalidException; exits on
    validation failures.
    """
    if config_path and config_string:
        fatal(
            "Please initialize with either config_path or config_string, not both.",
            exit_code=EXIT_INVALID_INPUT,
        )
    try:
        if config_string:
            self.config = self._parse_yaml(config_string, config_string=True)
            self.config_dir = "."
        else:  # maybe config_path
            config_path = self._get_config_path(config_path)
            self.config = self._parse_yaml(config_path, config_string=False)
            self.config_dir = os.path.dirname(config_path)

            # below checks are only needed in the non-test mode, when the config is read from file

            if self.config.get("example_config"):
                fatal(
                    "Example config detected, aborting.\n"
                    "Haven't you forgotten to use `-c <config_file>` parameter?\n"
                    "If you created your config based on the example config.yml,"
                    " then please remove 'example_config' key.",
                    exit_code=EXIT_INVALID_INPUT,
                )

            if self.config.get("config_version", 1) != 2:
                fatal(
                    "This version of GitLabForm requires 'config_version: 2' entry in the config.\n"
                    "This ensures that when the application behavior changes in a backward incompatible way,"
                    " you won't apply unexpected configuration to your GitLab instance.\n"
                    "Please read the upgrading guide here: https://bit.ly/3ub1g5C\n",
                    exit_code=EXIT_INVALID_INPUT,
                )

        # we are NOT checking for the existence of non-empty 'projects_and_groups' key here
        # as it would break using GitLabForm as a library
    except (FileNotFoundError, IOError):
        raise ConfigFileNotFoundException(config_path)
    except Exception as e:
        raise ConfigInvalidException(e)
def report_performance(
    *,
    profile: str,
    bench_path: Path,
    iterations: int,
    compare_results: bool,
    upload_results: bool,
) -> None:
    """Run the benchmark binary, then optionally post a comparison table to
    the merge request and/or upload the results to InfluxDB."""
    branch = get_branch_name()
    if branch is None:
        ui.fatal("Not on a branch, can't report benchmarks")
    # Help mypy infering that branch is no longer of type Optional[str] but str
    assert branch is not None
    _, commit_id = tankerci.git.run_captured(Path.cwd(), "rev-parse", "HEAD")
    if profile not in BENCHMARK_PROFILE_TO_BUILD_TARGET:
        ui.fatal(f"We don't benchmark {profile}")
    bench_binary = bench_path / "bench_tanker"
    if platform.system() == "Windows":
        bench_binary = bench_binary.with_suffix(".exe")
    bench_output = bench_path / "benchmarks.json"
    if not bench_binary.exists():
        ui.fatal("No benchmark binary to run")
    # run the Google Benchmark binary, writing aggregates only as JSON
    tankerci.run(
        str(bench_binary),
        f"--benchmark_out={bench_output}",
        "--benchmark_out_format=json",
        f"--benchmark_repetitions={iterations}",
        "--benchmark_report_aggregates_only",
    )
    bench_results = json.loads(bench_output.read_text())
    # sanity-check the benchmark environment before trusting the numbers
    if bench_results["context"]["library_build_type"] != "release":
        ui.fatal("Benchmark ran on a non-release build, check your config")
    if bench_results["context"]["cpu_scaling_enabled"]:
        ui.warning("This machine has CPU scaling enabled")
    hostname = os.environ.get("CI_RUNNER_DESCRIPTION", None)
    if not hostname:
        hostname = socket.gethostname()
    # group aggregate values (e.g. median, stddev) by benchmark name
    benchmark_aggregates: Dict[str, Dict[str, int]] = {}
    for benchmark in bench_results["benchmarks"]:
        name = benchmark["run_name"].lower()
        aggregate = benchmark["aggregate_name"]
        real_time = benchmark["real_time"]
        time_unit = benchmark["time_unit"]
        # normalize to seconds; only "ms" is supported so far
        if time_unit == "ms":
            real_time /= 1000
        else:
            raise RuntimeError(f"unimplemented time unit: {time_unit}")
        if name not in benchmark_aggregates:
            benchmark_aggregates[name] = {}
        benchmark_aggregates[name][aggregate] = real_time
    # Post a comparison table to the merge request?
    if compare_results:
        response = tankerci.reporting.query_last_metrics(
            "benchmark",
            group_by="scenario",
            tags=["scenario"],
            fields=["real_time", "stddev"],
            where={"branch": "master", "project": "sdk-native"},
        )
        master_results = {}
        for point in response["results"][0]["series"]:
            result = tankerci.benchmark.data_point_to_bench_result(point)
            if result["stddev"] is None:
                result["stddev"] = 0  # Old benchmarks did not have a stddev
            master_results[result["name"]] = result
        master_size = fetch_lib_size_for_branch("master")
        new_size = fetch_lib_size_for_branch(branch)
        result_message = tankerci.benchmark.format_benchmark_table(
            benchmark_aggregates, master_results, master_size, new_size
        )
        tankerci.benchmark.post_gitlab_mr_message("sdk-native", result_message)
    # Save results to InfluxDB?
    if upload_results:
        for name, results in benchmark_aggregates.items():
            tankerci.reporting.send_metric(
                "benchmark",
                tags={
                    "project": "sdk-native",
                    "branch": branch,
                    "build-target": BENCHMARK_PROFILE_TO_BUILD_TARGET[profile],
                    "scenario": name,
                    "host": hostname,
                },
                fields={
                    "real_time": results["median"],
                    "stddev": results["stddev"],
                    "commit_id": commit_id,
                    "profile": profile,
                },
            )
def modify_file_dealing_with_branch_protection(
    self,
    project_and_group,
    branch,
    file,
    operation,
    configuration,
    new_content=None,
):
    """Perform a file operation on a branch, temporarily lifting branch
    protection when GitLab rejects the push and the config tells us how to
    re-protect the branch afterwards."""
    # perhaps your user permissions are ok to just perform this operation regardless
    # of the branch protection...
    try:
        self.just_modify_file(
            project_and_group, branch, file, operation, configuration, new_content
        )
    except UnexpectedResponseException as e:
        if (
            e.response_status_code == 400
            and "You are not allowed to push into this branch" in e.response_text
        ):
            # ...but if not, then we can unprotect the branch, but only if we know how to
            # protect it again...
            if configuration.get("branches|" + branch + "|protected"):
                debug(
                    f"> Temporarily unprotecting the branch to {operation} a file in it..."
                )
                self.branch_protector.unprotect_branch(project_and_group, branch)
            else:
                # fix: pass exit_code as a keyword, consistent with every other
                # fatal() call in this codebase - positionally it would be
                # treated as part of the message, not as the exit code
                fatal(
                    f"Operation {operation} on file {file} in branch {branch} not permitted,"
                    f" but we don't have a branch protection configuration provided for this"
                    f" branch. Breaking as we cannot unprotect the branch as we would not know"
                    f" how to protect it again.",
                    exit_code=EXIT_INVALID_INPUT,
                )
            try:
                self.just_modify_file(
                    project_and_group,
                    branch,
                    file,
                    operation,
                    configuration,
                    new_content,
                )
            finally:
                # ...and protect the branch again after the operation
                if configuration.get("branches|" + branch + "|protected"):
                    debug("> Protecting the branch again.")
                    self.branch_protector.protect_branch(
                        project_and_group, configuration, branch
                    )
        else:
            raise e
def __init__(self, include_archived_projects=True, target=None, config_string=None):
    """Initialize the application: in test mode when both target and
    config_string are given, otherwise from CLI arguments."""
    if target and config_string:
        # this mode is basically only for testing
        self.target = target
        self.config_string = config_string
        self.verbose = True
        self.debug = True
        self.strict = True
        self.start_from = 1
        self.start_from_group = 1
        self.noop = False
        self.output_file = None
        self.skip_version_check = True
        self.include_archived_projects = include_archived_projects
        self.just_show_version = False
        self.terminate_after_error = True
        self.only_sections = "all"
        self.configure_output(tests=True)
    else:
        # normal mode
        (
            self.target,
            self.config,
            self.verbose,
            self.debug,
            self.strict,
            self.start_from,
            self.start_from_group,
            self.noop,
            self.output_file,
            self.skip_version_check,
            self.include_archived_projects,
            self.just_show_version,
            self.terminate_after_error,
            self.only_sections,
        ) = self.parse_args()
        self.configure_output()
        show_version(self.skip_version_check)
        if self.just_show_version:
            sys.exit(0)
        if not self.target:
            fatal(
                "target parameter is required.",
                exit_code=EXIT_INVALID_INPUT,
            )
    # wiring shared by both modes
    self.access_levels_transformer = AccessLevelsTransformer
    self.gitlab, self.configuration = self.initialize_configuration_and_gitlab(
    )
    self.group_processors = GroupProcessors(self.gitlab, self.configuration,
                                            self.strict)
    self.project_processors = ProjectProcessors(self.gitlab, self.configuration,
                                                self.strict)
    self.groups_and_projects_provider = GroupsAndProjectsProvider(
        self.gitlab,
        self.configuration,
        self.include_archived_projects,
    )
    self.non_empty_configs_provider = NonEmptyConfigsProvider(
        self.configuration, self.group_processors, self.project_processors)
def run(self):
    """Process all requested groups, then all projects, tracking successes
    and failures and writing the effective configuration to the output file."""
    projects, groups = show_header(
        self.target,
        self.groups_and_projects_provider,
        self.non_empty_configs_provider,
    )
    group_number = 0
    successful_groups = 0
    failed_groups = {}
    effective_configuration = EffectiveConfiguration(self.output_file)
    for group in groups:
        group_number += 1
        # --start-from-group support: skip earlier groups
        if group_number < self.start_from_group:
            info_group_count(
                "@",
                group_number,
                len(groups),
                cli_ui.yellow,
                f"Skipping group {group} as requested to start from {self.start_from_group}...",
                cli_ui.reset,
            )
            continue
        configuration = self.configuration.get_effective_config_for_group(
            group)
        effective_configuration.add_placeholder(group)
        info_group_count(
            "@",
            group_number,
            len(groups),
            f"Processing group: {group}",
        )
        try:
            self.group_processors.process_entity(
                group,
                configuration,
                dry_run=self.noop,
                effective_configuration=effective_configuration,
                only_sections=self.only_sections,
            )
            successful_groups += 1
        except Exception as e:
            failed_groups[group_number] = group
            trace = traceback.format_exc()
            message = f"Error occurred while processing group {group}, exception:\n\n{e}\n\n{trace}"
            if self.terminate_after_error:
                # flush what we have so far before exiting
                effective_configuration.write_to_file()
                fatal(
                    message,
                    exit_code=EXIT_PROCESSING_ERROR,
                )
            else:
                warning(message)
        finally:
            debug(
                f"@ ({group_number}/{len(groups)}) FINISHED Processing group: {group}"
            )
    project_number = 0
    successful_projects = 0
    failed_projects = {}
    for project_and_group in projects:
        project_number += 1
        # --start-from support: skip earlier projects
        if project_number < self.start_from:
            info_project_count(
                "*",
                project_number,
                len(projects),
                cli_ui.yellow,
                f"Skipping project {project_and_group} as requested to start from {self.start_from}...",
                cli_ui.reset,
            )
            continue
        configuration = self.configuration.get_effective_config_for_project(
            project_and_group)
        effective_configuration.add_placeholder(project_and_group)
        info_project_count(
            "*",
            project_number,
            len(projects),
            f"Processing project: {project_and_group}",
        )
        try:
            self.project_processors.process_entity(
                project_and_group,
                configuration,
                dry_run=self.noop,
                effective_configuration=effective_configuration,
                only_sections=self.only_sections,
            )
            successful_projects += 1
        except Exception as e:
            failed_projects[project_number] = project_and_group
            trace = traceback.format_exc()
            message = f"Error occurred while processing project {project_and_group}, exception:\n\n{e}\n\n{trace}"
            if self.terminate_after_error:
                effective_configuration.write_to_file()
                fatal(
                    message,
                    exit_code=EXIT_PROCESSING_ERROR,
                )
            else:
                warning(message)
        finally:
            debug(
                f"* ({project_number}/{len(projects)})"
                f" FINISHED Processing project: {project_and_group}",
            )
    effective_configuration.write_to_file()
    show_summary(
        groups,
        projects,
        successful_groups,
        successful_projects,
        failed_groups,
        failed_projects,
    )
def __init__(self, config_path=None, config_string=None):
    """Load the configuration from a YAML string or a file path and validate it.

    Raises ConfigFileNotFoundException / ConfigInvalidException; exits via
    sys.exit on validation failures.
    """
    if config_path and config_string:
        cli_ui.fatal(
            "Please initialize with either config_path or config_string, not both."
        )
        sys.exit(EXIT_INVALID_INPUT)
    try:
        if config_string:
            cli_ui.debug("Reading config from provided string.")
            self.config = yaml.safe_load(textwrap.dedent(config_string))
            self.config_dir = "."
        else:  # maybe config_path
            if "APP_HOME" in os.environ:
                # using this env var should be considered unofficial, we need this temporarily
                # for backwards compatibility. support for it may be removed without notice, do not use it!
                config_path = os.path.join(os.environ["APP_HOME"], "config.yml")
            elif not config_path:
                # this case is only meant for using gitlabform as a library
                config_path = os.path.join(
                    str(Path.home()), ".gitlabform", "config.yml"
                )
            elif config_path in [os.path.join(".", "config.yml"), "config.yml"]:
                # provided points to config.yml in the app current working dir
                config_path = os.path.join(os.getcwd(), "config.yml")
            cli_ui.debug(f"Reading config from file: {config_path}")
            with open(config_path, "r") as ymlfile:
                self.config = yaml.safe_load(ymlfile)
                logging.debug("Config parsed successfully as YAML.")
            # we need config path for accessing files for relative paths
            self.config_dir = os.path.dirname(config_path)
        if self.config.get("example_config"):
            cli_ui.fatal(
                "Example config detected, aborting.\n"
                "Haven't you forgotten to use `-c <config_file>` parameter?\n"
                "If you created your config based on the example config.yml,"
                " then please remove 'example_config' key."
            )
            sys.exit(EXIT_INVALID_INPUT)
        if self.config.get("config_version", 1) != 2:
            cli_ui.fatal(
                "This version of GitLabForm requires 'config_version: 2' entry in the config.\n"
                "This ensures that when the application behavior changes in a backward incompatible way,"
                " you won't apply unexpected configuration to your GitLab instance.\n"
                "Please read the upgrading guide here: https://bit.ly/3ub1g5C\n"
            )
            sys.exit(EXIT_INVALID_INPUT)
        # Bug fix: the previous "try: self.config.get(...) except
        # KeyNotFoundException" was dead code - dict.get() never raises, so a
        # missing key was silently accepted. Check membership explicitly.
        if "projects_and_groups" not in self.config:
            cli_ui.fatal("'projects_and_groups' key in the config is required.")
            sys.exit(EXIT_INVALID_INPUT)
    except (FileNotFoundError, IOError):
        raise ConfigFileNotFoundException(config_path)
    except Exception:
        if config_path:
            raise ConfigInvalidException(config_path)
        else:
            raise ConfigInvalidException(config_string)
def transform(cls, configuration: Configuration):
    """Replace human-readable access level names in the configuration with
    their numeric values, in place.

    Exits with EXIT_INVALID_INPUT when a name is not a valid access level.
    """
    logging_args = SimpleNamespace(quiet=False, verbose=False, debug=False)
    log = ConsolePrinter(logging_args)
    processor = Processor(log, configuration.config)

    def convert(node_coordinate):
        # Replace a single node's string value with its numeric access level;
        # shared by both search loops below (previously duplicated verbatim).
        access_level_string = str(node_coordinate.node)
        try:
            node_coordinate.parent[
                node_coordinate.parentref
            ] = AccessLevel.get_value(access_level_string)
        except KeyError:
            fatal(
                f"Configuration string '{access_level_string}' is not one of the valid access levels:"
                f" {', '.join(AccessLevel.get_canonical_names())}",
                exit_code=EXIT_INVALID_INPUT,
            )

    # [.!<100] effectively means that the value is non-numerical
    paths_to_hashes = [
        #
        # branches, old syntax
        "**.push_access_level[.!<100]",
        "**.merge_access_level[.!<100]",
        "**.unprotect_access_level[.!<100]",
        # members & group members
        "**.access_level[.!<100]",
        "**.group_access[.!<100]",  # old syntax
        "**.group_access_level[.!<100]",
        # tags
        "**.create_access_level[.!<100]",
    ]
    for path in paths_to_hashes:
        try:
            for node_coordinate in processor.get_nodes(path):
                convert(node_coordinate)
        except YAMLPathException:
            pass

    # there are different than the above, as they are elements of arrays
    # so we need different search query and an extra condition for
    # transformation
    paths_to_arrays = [
        #
        # branches, new GitLab Premium syntax
        "**.allowed_to_push.*.[access_level!<100]",
        "**.allowed_to_merge.*.[access_level!<100]",
        "**.allowed_to_unprotect.*.[access_level!<100]",
    ]
    for path in paths_to_arrays:
        try:
            for node_coordinate in processor.get_nodes(path):
                if node_coordinate.parentref == "access_level":
                    convert(node_coordinate)
        except YAMLPathException:
            pass
def init(
    working_path: Path, *, current_version: str, use_pyproject: bool = False
) -> None:
    """Interactively creates a new tbump.toml"""
    ui.info_1("Generating tbump config file")
    if use_pyproject:
        # config lives under [tool.tbump] inside pyproject.toml
        text = "[tool.tbump]\n"
        key_prefix = "tool.tbump."
        cfg_path = working_path / "pyproject.toml"
    else:
        text = ""
        key_prefix = ""
        cfg_path = working_path / "tbump.toml"
    # NOTE(review): this exists() check also covers pyproject.toml, yet the
    # file is opened in append mode below - presumably appending to an
    # existing pyproject.toml was intended; confirm whether this guard should
    # apply only to the tbump.toml case.
    if cfg_path.exists():
        ui.fatal(cfg_path, "already exists")
    # @current_version@ and @key_prefix@ are substituted below
    text += textwrap.dedent(
        """\
        # Uncomment this if your project is hosted on GitHub:
        # github_url = "https://github.com/<user or organization>/<project>/"

        [@key_prefix@version]
        current = "@current_version@"

        # Example of a semver regexp.
        # Make sure this matches current_version before
        # using tbump
        regex = '''
          (?P<major>\\d+)
          \\.
          (?P<minor>\\d+)
          \\.
          (?P<patch>\\d+)
          '''

        [@key_prefix@git]
        message_template = "Bump to {new_version}"
        tag_template = "v{new_version}"

        # For each file to patch, add a [[@key_prefix@file]] config
        # section containing the path of the file, relative to the
        # tbump.toml location.
        [[@key_prefix@file]]
        src = "..."

        # You can specify a list of commands to
        # run after the files have been patched
        # and before the git commit is made

        #  [[@key_prefix@before_commit]]
        #  name = "check changelog"
        #  cmd = "grep -q {new_version} Changelog.rst"

        # Or run some commands after the git tag and the branch
        # have been pushed:
        #  [[@key_prefix@after_push]]
        #  name = "publish"
        #  cmd = "./publish.sh"
        """
    )
    text = text.replace("@current_version@", current_version)
    text = text.replace("@key_prefix@", key_prefix)
    with cfg_path.open("a") as f:
        f.write(text)
    ui.info_2(ui.check, "Generated", cfg_path)
def protect_branch(self, project_and_group, configuration, branch):
    """Apply branch protection using whichever API ("old" or "new") the
    branch's config keys target."""
    try:
        requested_configuration = configuration["branches"][branch]
        config_type = self.get_branch_protection_config_type(
            project_and_group, requested_configuration, branch
        )
        if config_type == "old":
            self.protect_using_old_api(
                requested_configuration, project_and_group, branch
            )
        elif config_type == "new":
            # when configuration contains at least one of allowed_to_push and allowed_to_merge
            if any(
                extra_key in requested_configuration
                for extra_key in self.extra_param_keys
            ):
                for extra_param_key in self.extra_param_keys:
                    # check if an extra_param is in config and it contain user parameter
                    if extra_param_key in requested_configuration and any(
                        "user" in d
                        for d in requested_configuration[extra_param_key]
                    ):
                        for extra_config in requested_configuration[
                            extra_param_key
                        ]:
                            # loop over the array of extra param and get the user_id related to user
                            if "user" in extra_config.keys():
                                # the API wants user_id, not username
                                user_id = self.gitlab._get_user_id(
                                    extra_config.pop("user")
                                )
                                extra_config["user_id"] = user_id
            if self.configuration_update_needed(
                requested_configuration, project_and_group, branch
            ):
                self.protect_using_new_api(
                    requested_configuration, project_and_group, branch
                )
            else:
                debug(
                    "Skipping setting branch '%s' protection configuration because it's already as requested.",
                    branch,
                )
            if "code_owner_approval_required" in requested_configuration:
                self.set_code_owner_approval_required(
                    requested_configuration, project_and_group, branch
                )
    except NotFoundException:
        message = f"Branch '{branch}' not found when trying to set it as protected/unprotected!"
        if self.strict:
            fatal(
                message,
                exit_code=EXIT_PROCESSING_ERROR,
            )
        else:
            warning(message)
def _process_configuration(self, project_and_group: str, configuration: dict):
    """Create, modify or delete files in the configured branches of a project.

    For each entry under configuration["files"]: resolve the target branches
    ("all", "protected", or an explicit list), then either delete the file or
    write content taken from the config (`content`) or a local file (`file`),
    optionally rendered as a Jinja template first.
    """
    for file in sorted(configuration["files"]):
        debug("Processing file '%s'...", file)
        if configuration.get("files|" + file + "|skip"):
            debug("Skipping file '%s'", file)
            continue
        # resolve the branches this file applies to
        if configuration["files"][file]["branches"] == "all":
            all_branches = self.gitlab.get_branches(project_and_group)
            branches = sorted(all_branches)
        elif configuration["files"][file]["branches"] == "protected":
            protected_branches = self.gitlab.get_protected_branches(
                project_and_group
            )
            branches = sorted(protected_branches)
        else:
            all_branches = self.gitlab.get_branches(project_and_group)
            branches = []
            for branch in configuration["files"][file]["branches"]:
                if branch in all_branches:
                    branches.append(branch)
                else:
                    message = f"! Branch '{branch}' not found, not processing file '{file}' in it"
                    if self.strict:
                        fatal(
                            message,
                            exit_code=EXIT_INVALID_INPUT,
                        )
                    else:
                        warning(message)
        for branch in branches:
            verbose(f"Processing file '{file}' in branch '{branch}'")
            # 'content' and 'file' are mutually exclusive content sources
            if configuration.get(
                "files|" + file + "|content"
            ) and configuration.get("files|" + file + "|file"):
                fatal(
                    f"File '{file}' in '{project_and_group}' has both `content` and `file` set - "
                    "use only one of these keys.",
                    exit_code=EXIT_INVALID_INPUT,
                )
            if configuration.get("files|" + file + "|delete"):
                try:
                    self.gitlab.get_file(project_and_group, branch, file)
                    debug("Deleting file '%s' in branch '%s'", file, branch)
                    self.modify_file_dealing_with_branch_protection(
                        project_and_group,
                        branch,
                        file,
                        "delete",
                        configuration,
                    )
                except NotFoundException:
                    debug(
                        "Not deleting file '%s' in branch '%s' (already doesn't exist)",
                        file,
                        branch,
                    )
            else:
                # change or create file
                if configuration.get("files|" + file + "|content"):
                    new_content = configuration.get("files|" + file + "|content")
                else:
                    path_in_config = Path(
                        configuration.get("files|" + file + "|file")
                    )
                    if path_in_config.is_absolute():
                        # fix: keep the Path object here. Previously the
                        # file's *content* was assigned to `path`
                        # (path = path_in_config.read_text()), so the
                        # read_text() below crashed on a str with
                        # AttributeError for absolute paths.
                        path = path_in_config
                    else:
                        # relative paths are relative to config file location
                        path = Path(
                            os.path.join(
                                self.config.config_dir, str(path_in_config)
                            )
                        )
                    new_content = path.read_text()
                if configuration.get("files|" + file + "|template", True):
                    new_content = self.get_file_content_as_template(
                        new_content,
                        project_and_group,
                        **configuration.get(
                            "files|" + file + "|jinja_env", dict()
                        ),
                    )
                try:
                    current_content = self.gitlab.get_file(
                        project_and_group, branch, file
                    )
                    if current_content != new_content:
                        if configuration.get("files|" + file + "|overwrite"):
                            debug("Changing file '%s' in branch '%s'", file, branch)
                            self.modify_file_dealing_with_branch_protection(
                                project_and_group,
                                branch,
                                file,
                                "modify",
                                configuration,
                                new_content,
                            )
                        else:
                            debug(
                                "Not changing file '%s' in branch '%s' - overwrite flag not set.",
                                file,
                                branch,
                            )
                    else:
                        debug(
                            "Not changing file '%s' in branch '%s' - it's content is already"
                            " as provided)",
                            file,
                            branch,
                        )
                except NotFoundException:
                    debug("Creating file '%s' in branch '%s'", file, branch)
                    self.modify_file_dealing_with_branch_protection(
                        project_and_group,
                        branch,
                        file,
                        "add",
                        configuration,
                        new_content,
                    )
            if configuration.get("files|" + file + "|only_first_branch"):
                verbose("Skipping other branches for this file, as configured.")
                break