def _process_configuration(self, project_and_group: str, configuration: dict):
    """Protect or unprotect each tag of the project per the configuration.

    Tags marked `protected` are first unprotected (best effort) and then
    re-protected, optionally with a `create_access_level`. In strict mode a
    missing tag is fatal; otherwise it is only reported as a warning.
    """
    for tag in sorted(configuration["tags"]):
        tag_config = configuration["tags"][tag]
        try:
            if tag_config["protected"]:
                # optional key; defaults to None when absent
                create_access_level = tag_config.get("create_access_level")
                debug("Setting tag '%s' as *protected*", tag)
                try:
                    # try to unprotect first
                    self.gitlab.unprotect_tag(project_and_group, tag)
                except NotFoundException:
                    pass
                self.gitlab.protect_tag(
                    project_and_group, tag, create_access_level
                )
            else:
                debug("Setting tag '%s' as *unprotected*", tag)
                self.gitlab.unprotect_tag(project_and_group, tag)
        except NotFoundException:
            message = f"Tag '{tag}' not found when trying to set it as protected/unprotected!"
            if not self.strict:
                warning(message)
            else:
                fatal(
                    message,
                    exit_code=EXIT_PROCESSING_ERROR,
                )
def status(self):
    """Print a status table for every job recorded in the restore log."""
    if not (os.path.exists(self.restore_log_path) and os.path.isfile(self.restore_log_path)):
        cli_ui.warning('No restore log found!')
        sys.exit(0)  # FIXME: Don't exit, rather throw an exception
    restore_log = self._get_restore_log()
    if not self.rubrik:
        # not connected yet - re-read credentials, pre-filling the cluster IP
        creds = self._read_credentials(
            ignore_stored=True,
            presets={'address': restore_log['cluster']['ip']},
        )
        self._connect(creds)
    statuses = []
    for job in restore_log['jobs']:
        handler = config_class(job['configType'])
        job_status = handler(self.path, self.rubrik, logging.getLogger()).status(job)
        if job_status:
            statuses.append(job_status)
    # color the status column, gray out the details, bold the name
    status_rows = [
        [
            (status_color(entry[0]), entry[0]),
            (cli_ui.lightgray, entry[1]),
            (cli_ui.lightgray, entry[2]),
            (cli_ui.lightgray, entry[3]),
            (cli_ui.bold, entry[4]),
        ]
        for entry in statuses
    ]
    cli_ui.info('\nBackup Id:', cli_ui.turquoise, restore_log['backupId'], end='\n\n')
    cli_ui.info_table(status_rows, headers=['Status', 'Start time', 'End time', 'Type', 'Name'])
def _get_groups_and_users_to_set(configuration: dict) -> (dict, dict): # read the configs in a single place, as the syntax is changing and there are a lot of possible # deprecation notices to be printed groups_to_set_by_group_path = configuration.get( "group_shared_with", {}) if groups_to_set_by_group_path: warning( "Using `group_shared_with:` is deprecated and will be removed in future versions " "of GitLabForm. Please move its contents to `group_members.groups`." ) else: groups_to_set_by_group_path = configuration.get( "group_members|groups", {}) users_to_set_by_username = configuration.get("group_members", {}) if users_to_set_by_username: proper_users_to_set_by_username = configuration.get( "group_members|users", {}) if proper_users_to_set_by_username: users_to_set_by_username = proper_users_to_set_by_username else: users_to_set_by_username.pop("enforce", None) users_to_set_by_username.pop("users", None) users_to_set_by_username.pop("groups", None) if users_to_set_by_username: warning( "Putting users as target members of the groups directly under `group_members` key is deprecated " "and will be removed in future versions of GitLabForm. " "Please put them under `group_members.users` key instead." ) return groups_to_set_by_group_path, users_to_set_by_username
def main(args: argparse.Namespace) -> None:
    """Run a command across the requested repos; raise if any are not cloned."""
    workspace = tsrc.cli.get_workspace(args)
    cmd_runner = CmdRunner(
        workspace.root_path, args.cmd, args.cmd_as_str, shell=args.shell
    )
    manifest = workspace.local_manifest.get_manifest()
    workspace_config = workspace.config
    groups_from_config = workspace_config.repo_groups
    all_remote_repos = manifest.get_repos(all_=True)
    # a repo counts as cloned when its directory exists in the workspace
    cloned_repos = [
        repo
        for repo in all_remote_repos
        if (workspace.root_path / repo.src).exists()
    ]
    if args.groups_from_config:
        requested_repos = manifest.get_repos(groups=groups_from_config)
    elif args.groups:
        requested_repos = manifest.get_repos(groups=args.groups)
    else:
        requested_repos = cloned_repos
    found = [repo for repo in requested_repos if repo in cloned_repos]
    missing = [repo for repo in requested_repos if repo not in cloned_repos]
    tsrc.run_sequence(found, cmd_runner)
    if not missing:
        ui.info("OK", ui.check)
        return
    ui.warning(
        "The following repos were requested but missing from the workspace:"
    )
    for repo in missing:
        ui.info("*", repo.src, fileobj=sys.stderr)
    raise MissingRepos(missing)
def wrapped(args: ArgsList = None) -> None:
    """Invoke main_func, mapping expected errors and Ctrl-C to exit code 1."""
    colored_traceback.add_hook()
    try:
        main_func(args=args)
    except tsrc.Error as error:
        # "expected" failure, display it and exit
        if error.message:
            ui.error(error.message)
        sys.exit(1)
    except KeyboardInterrupt:
        ui.warning("Interrupted by user, quitting")
        sys.exit(1)
def wrapped(args: ArgsList = None) -> None:
    """Entry-point wrapper: pretty tracebacks, clean exits on known failures."""
    colored_traceback.add_hook()
    try:
        main_func(args=args)
    except KeyboardInterrupt:
        ui.warning("Interrupted by user, quitting")
        sys.exit(1)
    except tsrc.Error as err:
        # "expected" failure, display it and exit. Note: we allow
        # tsrc.Error instances to have an empty message. In that
        # case, do not print anything and assume relevant info has
        # already been printed.
        if err.message:
            ui.error(err.message)
        sys.exit(1)
def unprotect_branch(self, project_and_group, branch):
    """Remove protection from a branch, honoring strict mode on a missing branch."""
    try:
        debug("Setting branch '%s' as unprotected", branch)
        self.gitlab.unprotect_branch_new_api(project_and_group, branch)
    except NotFoundException:
        message = f"Branch '{branch}' not found when trying to set it as protected/unprotected!"
        if not self.strict:
            warning(message)
        else:
            fatal(
                message,
                exit_code=EXIT_PROCESSING_ERROR,
            )
def _process_configuration(self, project_and_group: str, configuration: dict):
    """Create, update, or delete the project's secret (CI/CD) variables.

    Skips the whole project when builds are disabled, as the variables API
    is unavailable then. Variables with a truthy `delete` flag are removed
    (best effort); all others are PUT, falling back to POST when not found.

    Fixes: the delete error handler used a bare `except:` (which also
    swallows KeyboardInterrupt/SystemExit) — narrowed to `Exception`; its
    warning message wrongly said "group" for a project — corrected.
    """
    if (self.gitlab.get_project_settings(project_and_group)
            ["builds_access_level"] == "disabled"):
        warning(
            "Builds disabled in this project so I can't set secret variables here."
        )
        return

    debug(
        "Secret variables BEFORE: %s",
        self.gitlab.get_secret_variables(project_and_group),
    )

    for secret_variable in sorted(configuration["secret_variables"]):
        variable_config = configuration["secret_variables"][secret_variable]

        if "delete" in variable_config:
            key = variable_config["key"]
            if variable_config["delete"]:
                verbose(
                    f"Deleting {secret_variable}: {key} in project {project_and_group}"
                )
                try:
                    self.gitlab.delete_secret_variable(project_and_group, key)
                except Exception:
                    # best effort - report but keep processing other variables
                    warning(
                        f"Could not delete variable {key} in project {project_and_group}"
                    )
                continue

        verbose(f"Setting secret variable: {secret_variable}")
        try:
            self.gitlab.put_secret_variable(project_and_group, variable_config)
        except NotFoundException:
            # variable does not exist yet - create it
            self.gitlab.post_secret_variable(project_and_group, variable_config)

    debug(
        "Secret variables AFTER: %s",
        self.gitlab.get_secret_variables(project_and_group),
    )
def _is_enforce_enabled(configuration: dict) -> bool: # read if enforcing is enabled from the config once - it will be used for both sharing group with groups # as well as assigning single users to group enforce_group_members = configuration.get("enforce_group_members", False) if enforce_group_members: warning( "Using `enforce_group_members` key is deprecated and will be removed in future versions " "of GitLabForm. Please use `group_members.enforce` key instead." ) else: enforce_group_members = configuration.get("group_members|enforce", False) return enforce_group_members
def protect_branch(self, project_and_group, configuration, branch):
    """Protect or unprotect a branch, picking the old or new API by config keys.

    The old API path requires *all* of `self.old_api_keys` to be present;
    the new API path triggers on *any* of `self.new_api_keys`.

    Fix: the "already set" debug message contained a '%s' placeholder but
    never passed `branch` as an argument - it now does.
    """
    try:
        requested_configuration = configuration["branches"][branch]
        if requested_configuration.get("protected"):
            # note that for old API *all* keys have to be defined...
            if all(key in requested_configuration
                   for key in self.old_api_keys):
                # unprotect first to reset 'allowed to merge' and 'allowed to push' fields
                self.protect_using_old_api(requested_configuration,
                                           project_and_group, branch)
            # ...while for the new one we need ANY new key
            elif any(key in requested_configuration
                     for key in self.new_api_keys):
                if self.configuration_update_needed(
                        requested_configuration, project_and_group, branch):
                    self.protect_using_new_api(requested_configuration,
                                               project_and_group, branch)
                else:
                    logging.debug(
                        "Skipping set branch '%s' access levels because they're already set",
                        branch,
                    )
                    return  # TODO: is this ok that we skip below code in this case?
            if "code_owner_approval_required" in requested_configuration:
                self.set_code_owner_approval_required(
                    requested_configuration, project_and_group, branch)
        else:
            self.unprotect(project_and_group, branch)
    except NotFoundException:
        message = f"Branch '{branch}' not found when trying to set it as protected/unprotected!"
        if self.strict:
            cli_ui.error(message)
            sys.exit(EXIT_PROCESSING_ERROR)
        else:
            cli_ui.warning(message)
def protect_using_old_api(self, requested_configuration, project_and_group,
                          branch):
    """Protect a branch via the legacy API, warning that it is deprecated."""
    warning(
        f"Using keys {self.old_api_keys} for configuring protected"
        " branches is deprecated and will be removed in future versions of GitLabForm."
        f" Please start using new keys: {self.new_api_keys}"
    )
    debug("Setting branch '%s' as *protected*", branch)

    # Protected Branches API is one of those that do not support editing entities
    # (PUT is not documented for it, at least). so you need to delete existing
    # branch protection (DELETE) and recreate it (POST) to perform an update
    # (otherwise you get HTTP 409 "Protected branch 'foo' already exists")
    self.gitlab.unprotect_branch_new_api(project_and_group, branch)

    developers_can_push = requested_configuration["developers_can_push"]
    developers_can_merge = requested_configuration["developers_can_merge"]
    self.gitlab.protect_branch(
        project_and_group,
        branch,
        developers_can_push,
        developers_can_merge,
    )
def apply_branch_protection_configuration(
    self, project_and_group, configuration, branch
):
    """Dispatch to protect or unprotect based on the branch's `protected` flag."""
    try:
        branch_config = configuration["branches"][branch]
        if branch_config.get("protected"):
            self.protect_branch(project_and_group, configuration, branch)
        else:
            self.unprotect_branch(project_and_group, branch)
    except NotFoundException:
        message = f"Branch '{branch}' not found when trying to set it as protected/unprotected!"
        if not self.strict:
            warning(message)
        else:
            fatal(
                message,
                exit_code=EXIT_PROCESSING_ERROR,
            )
def _process_configuration(self, project_and_group: str, configuration: dict):
    """Set or delete each configured service (integration) of the project."""
    for service in sorted(configuration["services"]):
        service_config = configuration["services"][service]
        if configuration.get("services|" + service + "|delete"):
            cli_ui.debug(f"Deleting service: {service}")
            self.gitlab.delete_service(project_and_group, service)
            continue
        if service_config.get("recreate"):
            # support from this configuration key has been added in v1.13.4
            # we will remove it here to avoid passing it to the GitLab API
            cli_ui.warning(
                f"Ignoring deprecated 'recreate' field in the '{service}' service config. "
                "Please remove it from the config file permanently as this workaround is not "
                "needed anymore.")
            del service_config["recreate"]
        cli_ui.debug(f"Setting service: {service}")
        self.gitlab.set_service(project_and_group, service, service_config)
def main(args: argparse.Namespace) -> None:
    """Run a command in every cloned repo belonging to the requested groups."""
    workspace = tsrc.cli.get_workspace(args)
    workspace.load_manifest()
    cmd_runner = CmdRunner(
        workspace.root_path, args.cmd, args.cmd_as_str, shell=args.shell
    )
    manifest = workspace.local_manifest.manifest
    assert manifest
    cloned_repos = workspace.get_repos()
    requested_repos = manifest.get_repos(groups=args.groups)
    found = [repo for repo in requested_repos if repo in cloned_repos]
    missing = [repo for repo in requested_repos if repo not in cloned_repos]
    tsrc.run_sequence(found, cmd_runner)
    if not missing:
        ui.info("OK", ui.check)
        return
    ui.warning("The following repos were skipped:")
    for repo in missing:
        ui.info("*", repo.src, fileobj=sys.stderr)
def _process_configuration(self, group: str, configuration: dict):
    """Create, update, or delete the group's secret (CI/CD) variables.

    Variables with a truthy `delete` flag are removed (best effort); all
    others are PUT, falling back to POST when the variable does not exist.

    Fix: the delete error handler used a bare `except:` (which also
    swallows KeyboardInterrupt/SystemExit) — narrowed to `Exception`.
    """
    debug(
        "Group secret variables BEFORE: %s",
        self.gitlab.get_group_secret_variables(group),
    )

    for secret_variable in sorted(configuration["group_secret_variables"]):
        variable_config = configuration["group_secret_variables"][secret_variable]

        if "delete" in variable_config:
            key = variable_config["key"]
            if variable_config["delete"]:
                verbose(f"Deleting {secret_variable}: {key} in group {group}")
                try:
                    self.gitlab.delete_group_secret_variable(group, key)
                except Exception:
                    # best effort - report but keep processing other variables
                    warning(
                        f"Could not delete variable {key} in group {group}"
                    )
                continue

        verbose(f"Setting group secret variable: {secret_variable}")
        try:
            self.gitlab.put_group_secret_variable(group, variable_config)
        except NotFoundException:
            # variable does not exist yet - create it
            self.gitlab.post_group_secret_variable(group, variable_config)

    debug(
        "Groups secret variables AFTER: %s",
        self.gitlab.get_group_secret_variables(group),
    )
def run(self):
    """Process all groups, then all projects, writing the effective config.

    Per-entity failures are collected; depending on `terminate_after_error`
    the run either aborts immediately (fatal) or continues (warning).
    A summary with success/failure counts is shown at the end.
    """
    projects, groups = show_header(
        self.target,
        self.groups_and_projects_provider,
        self.non_empty_configs_provider,
    )
    group_number = 0
    successful_groups = 0
    failed_groups = {}
    effective_configuration = EffectiveConfiguration(self.output_file)
    for group in groups:
        group_number += 1
        # allow resuming a previously interrupted run at a given group index
        if group_number < self.start_from_group:
            info_group_count(
                "@",
                group_number,
                len(groups),
                cli_ui.yellow,
                f"Skipping group {group} as requested to start from {self.start_from_group}...",
                cli_ui.reset,
            )
            continue
        configuration = self.configuration.get_effective_config_for_group(
            group)
        effective_configuration.add_placeholder(group)
        info_group_count(
            "@",
            group_number,
            len(groups),
            f"Processing group: {group}",
        )
        try:
            self.group_processors.process_entity(
                group,
                configuration,
                dry_run=self.noop,
                effective_configuration=effective_configuration,
                only_sections=self.only_sections,
            )
            successful_groups += 1
        except Exception as e:
            failed_groups[group_number] = group
            trace = traceback.format_exc()
            message = f"Error occurred while processing group {group}, exception:\n\n{e}\n\n{trace}"
            if self.terminate_after_error:
                # flush what we have so far so the output file stays usable
                effective_configuration.write_to_file()
                fatal(
                    message,
                    exit_code=EXIT_PROCESSING_ERROR,
                )
            else:
                warning(message)
        finally:
            debug(
                f"@ ({group_number}/{len(groups)}) FINISHED Processing group: {group}"
            )
    project_number = 0
    successful_projects = 0
    failed_projects = {}
    for project_and_group in projects:
        project_number += 1
        # allow resuming a previously interrupted run at a given project index
        if project_number < self.start_from:
            info_project_count(
                "*",
                project_number,
                len(projects),
                cli_ui.yellow,
                f"Skipping project {project_and_group} as requested to start from {self.start_from}...",
                cli_ui.reset,
            )
            continue
        configuration = self.configuration.get_effective_config_for_project(
            project_and_group)
        effective_configuration.add_placeholder(project_and_group)
        info_project_count(
            "*",
            project_number,
            len(projects),
            f"Processing project: {project_and_group}",
        )
        try:
            self.project_processors.process_entity(
                project_and_group,
                configuration,
                dry_run=self.noop,
                effective_configuration=effective_configuration,
                only_sections=self.only_sections,
            )
            successful_projects += 1
        except Exception as e:
            failed_projects[project_number] = project_and_group
            trace = traceback.format_exc()
            message = f"Error occurred while processing project {project_and_group}, exception:\n\n{e}\n\n{trace}"
            if self.terminate_after_error:
                # flush what we have so far so the output file stays usable
                effective_configuration.write_to_file()
                fatal(
                    message,
                    exit_code=EXIT_PROCESSING_ERROR,
                )
            else:
                warning(message)
        finally:
            debug(
                f"* ({project_number}/{len(projects)})"
                f" FINISHED Processing project: {project_and_group}",
            )
    effective_configuration.write_to_file()
    show_summary(
        groups,
        projects,
        successful_groups,
        successful_projects,
        failed_groups,
        failed_projects,
    )
def report_performance(
    *,
    profile: str,
    bench_path: Path,
    iterations: int,
    compare_results: bool,
    upload_results: bool,
) -> None:
    """Run the benchmark binary and optionally compare/upload the results.

    Runs `bench_tanker` with Google Benchmark flags, validates the run
    (release build, no missing binary), aggregates the per-benchmark
    timings, then optionally posts a comparison table against master to
    the GitLab MR and/or uploads the aggregates to the metrics store.
    """
    branch = get_branch_name()
    if branch is None:
        ui.fatal("Not on a branch, can't report benchmarks")
    # Help mypy infering that branch is no longer of type Optional[str] but str
    assert branch is not None
    _, commit_id = tankerci.git.run_captured(Path.cwd(), "rev-parse", "HEAD")
    if profile not in BENCHMARK_PROFILE_TO_BUILD_TARGET:
        ui.fatal(f"We don't benchmark {profile}")
    bench_binary = bench_path / "bench_tanker"
    if platform.system() == "Windows":
        bench_binary = bench_binary.with_suffix(".exe")
    bench_output = bench_path / "benchmarks.json"
    if not bench_binary.exists():
        ui.fatal("No benchmark binary to run")
    # Google Benchmark flags: repeat `iterations` times, report only the
    # aggregates (mean/median/stddev), dump results as JSON
    tankerci.run(
        str(bench_binary),
        f"--benchmark_out={bench_output}",
        "--benchmark_out_format=json",
        f"--benchmark_repetitions={iterations}",
        "--benchmark_report_aggregates_only",
    )
    bench_results = json.loads(bench_output.read_text())
    if bench_results["context"]["library_build_type"] != "release":
        ui.fatal("Benchmark ran on a non-release build, check your config")
    if bench_results["context"]["cpu_scaling_enabled"]:
        ui.warning("This machine has CPU scaling enabled")
    # prefer the CI runner name as host label; fall back to the hostname
    hostname = os.environ.get("CI_RUNNER_DESCRIPTION", None)
    if not hostname:
        hostname = socket.gethostname()
    # map: benchmark name -> {aggregate name ("mean"/"median"/...) -> seconds}
    benchmark_aggregates: Dict[str, Dict[str, int]] = {}
    for benchmark in bench_results["benchmarks"]:
        name = benchmark["run_name"].lower()
        aggregate = benchmark["aggregate_name"]
        real_time = benchmark["real_time"]
        time_unit = benchmark["time_unit"]
        # normalize to seconds; only "ms" is handled so far
        if time_unit == "ms":
            real_time /= 1000
        else:
            raise RuntimeError(f"unimplemented time unit: {time_unit}")
        if name not in benchmark_aggregates:
            benchmark_aggregates[name] = {}
        benchmark_aggregates[name][aggregate] = real_time
    # Post a comparison table to the merge request?
    if compare_results:
        response = tankerci.reporting.query_last_metrics(
            "benchmark",
            group_by="scenario",
            tags=["scenario"],
            fields=["real_time", "stddev"],
            where={"branch": "master", "project": "sdk-native"},
        )
        master_results = {}
        for point in response["results"][0]["series"]:
            result = tankerci.benchmark.data_point_to_bench_result(point)
            if result["stddev"] is None:
                result["stddev"] = 0  # Old benchmarks did not have a stddev
            master_results[result["name"]] = result
        master_size = fetch_lib_size_for_branch("master")
        new_size = fetch_lib_size_for_branch(branch)
        result_message = tankerci.benchmark.format_benchmark_table(
            benchmark_aggregates, master_results, master_size, new_size
        )
        tankerci.benchmark.post_gitlab_mr_message("sdk-native", result_message)
    # Save results to InfluxDB?
    if upload_results:
        for name, results in benchmark_aggregates.items():
            tankerci.reporting.send_metric(
                "benchmark",
                tags={
                    "project": "sdk-native",
                    "branch": branch,
                    "build-target": BENCHMARK_PROFILE_TO_BUILD_TARGET[profile],
                    "scenario": name,
                    "host": hostname,
                },
                fields={
                    "real_time": results["median"],
                    "stddev": results["stddev"],
                    "commit_id": commit_id,
                    "profile": profile,
                },
            )
def _process_groups(self, group: str, groups_to_set_by_group_path: dict,
                    enforce_group_members: bool):
    """Share the group with the configured groups at the configured access level.

    Existing shares with a different access level or expiry are removed and
    re-added (the share API offers no in-place edit). With enforcement on,
    shares not present in the configuration are removed.
    """
    # group users before by group name
    groups_before = self.gitlab.get_group_case_insensitive(
        group)["shared_with_groups"]
    debug("Group shared with BEFORE: %s", groups_before)
    # index current shares by the shared group's full path for quick lookup
    groups_before_by_group_path = dict()
    for share_details in groups_before:
        groups_before_by_group_path[
            share_details["group_full_path"]] = share_details
    for share_with_group_path in groups_to_set_by_group_path:
        # `group_access_level` is the deprecated spelling of `group_access`
        group_access_to_set = groups_to_set_by_group_path[
            share_with_group_path].get("group_access_level", None)
        if group_access_to_set:
            warning(
                "Using `group_access_level` key deprecated and will be removed in future versions "
                "of GitLabForm. Please rename it to `group_access`.")
        else:
            group_access_to_set = groups_to_set_by_group_path[
                share_with_group_path]["group_access"]
        # `expires_at` is optional; None means the share never expires
        expires_at_to_set = (
            groups_to_set_by_group_path[share_with_group_path]
            ["expires_at"]
            if "expires_at" in
            groups_to_set_by_group_path[share_with_group_path] else None)
        if share_with_group_path in groups_before_by_group_path:
            group_access_before = groups_before_by_group_path[
                share_with_group_path]["group_access_level"]
            expires_at_before = groups_before_by_group_path[
                share_with_group_path]["expires_at"]
            if (group_access_before == group_access_to_set
                    and expires_at_before == expires_at_to_set):
                debug(
                    "Nothing to change for group '%s' - same config now as to set.",
                    share_with_group_path,
                )
            else:
                debug(
                    "Re-adding group '%s' to change their access level or expires at.",
                    share_with_group_path,
                )
                # we will remove the group first and then re-add them,
                # to ensure that the group has the expected access level
                self.gitlab.remove_share_from_group(
                    group, share_with_group_path)
                self.gitlab.add_share_to_group(
                    group,
                    share_with_group_path,
                    group_access_to_set,
                    expires_at_to_set,
                )
        else:
            debug(
                "Adding group '%s' who previously was not a member.",
                share_with_group_path,
            )
            self.gitlab.add_share_to_group(group, share_with_group_path,
                                           group_access_to_set,
                                           expires_at_to_set)
    if enforce_group_members:
        # remove groups not configured explicitly
        groups_not_configured = set(groups_before_by_group_path) - set(
            groups_to_set_by_group_path)
        for group_path in groups_not_configured:
            debug(
                "Removing group '%s' who is not configured to be a member.",
                group_path,
            )
            self.gitlab.remove_share_from_group(group, group_path)
    else:
        debug("Not enforcing group members.")
    # NOTE(review): the message says "shared with" but this calls
    # get_group_members(), not the shared-with-groups endpoint used for
    # the BEFORE log above - confirm whether this is intentional.
    debug("Group shared with AFTER: %s", self.gitlab.get_group_members(group))
def process_all(self, projects_and_groups, groups):
    """Process all groups, then all projects, and print a summary.

    Entities with an empty effective config are skipped. Failures are
    collected per entity; with `terminate_after_error` set the run aborts
    immediately, otherwise it continues and exits non-zero at the end if
    anything failed.

    Fix: the final success message was an f-string without placeholders
    and read "processes successfully" - now a plain, corrected string.
    """
    group_number = 0
    successful_groups = 0
    failed_groups = {}
    maybe_output_file = self.try_to_get_output_file()

    for group in groups:
        group_number += 1
        # allow resuming a previously interrupted run at a given group index
        if group_number < self.start_from_group:
            info_group_count(
                "@",
                group_number,
                len(groups),
                cli_ui.yellow,
                f"Skipping group {group} as requested to start from {self.start_from_group}...",
                cli_ui.reset,
            )
            continue
        configuration = self.configuration.get_effective_config_for_group(
            group)
        if configuration:
            info_group_count("@", group_number, len(groups),
                             f"Processing group: {group}")
            self.try_to_write_header_to_output_file(group, maybe_output_file)
            try:
                self.group_processors.process_group(
                    group,
                    configuration,
                    dry_run=self.noop,
                    output_file=maybe_output_file,
                )
                successful_groups += 1
            except Exception as e:
                failed_groups[group_number] = group
                trace = traceback.format_exc()
                message = f"Error occurred while processing group {group}, exception:\n\n{e}\n\n{trace}"
                if self.terminate_after_error:
                    # close the output file so it stays usable before dying
                    self.try_to_close_output_file(maybe_output_file)
                    cli_ui.error(message)
                    sys.exit(EXIT_PROCESSING_ERROR)
                else:
                    cli_ui.warning(message)
            finally:
                logging.debug(
                    f"@ ({group_number}/{len(groups)}) FINISHED Processing group: {group}"
                )
        else:
            self.try_to_write_header_to_output_file(group, maybe_output_file,
                                                    empty_config=True)
            info_group_count(
                "@",
                group_number,
                len(groups),
                cli_ui.yellow,
                f"Skipping group {group} as it has empty effective config.",
                cli_ui.reset,
            )

    project_number = 0
    successful_projects = 0
    failed_projects = {}
    for project_and_group in projects_and_groups:
        project_number += 1
        # allow resuming a previously interrupted run at a given project index
        if project_number < self.start_from:
            info_project_count(
                "*",
                project_number,
                len(projects_and_groups),
                cli_ui.yellow,
                f"Skipping project {project_and_group} as requested to start from {self.start_from}...",
                cli_ui.reset,
            )
            continue
        configuration = self.configuration.get_effective_config_for_project(
            project_and_group)
        if configuration:
            info_project_count(
                "*",
                project_number,
                len(projects_and_groups),
                f"Processing project: {project_and_group}",
            )
            self.try_to_write_header_to_output_file(
                project_and_group, maybe_output_file)
            try:
                self.project_processors.process_project(
                    project_and_group,
                    configuration,
                    dry_run=self.noop,
                    output_file=maybe_output_file,
                )
                successful_projects += 1
            except Exception as e:
                failed_projects[project_number] = project_and_group
                trace = traceback.format_exc()
                message = f"Error occurred while processing project {project_and_group}, exception:\n\n{e}\n\n{trace}"
                if self.terminate_after_error:
                    self.try_to_close_output_file(maybe_output_file)
                    cli_ui.error(message)
                    sys.exit(EXIT_PROCESSING_ERROR)
                else:
                    cli_ui.warning(message)
            finally:
                logging.debug(
                    f"* ({project_number}/{len(projects_and_groups)}) FINISHED Processing project: {project_and_group}",
                )
        else:
            self.try_to_write_header_to_output_file(project_and_group,
                                                    maybe_output_file,
                                                    empty_config=True)
            info_project_count(
                "*",
                project_number,
                len(projects_and_groups),
                cli_ui.yellow,
                f"Skipping project {project_and_group} as it has empty effective config.",
                cli_ui.reset,
            )

    self.try_to_close_output_file(maybe_output_file)

    cli_ui.info_1(
        f"# of groups processed successfully: {successful_groups}")
    cli_ui.info_1(
        f"# of projects processed successfully: {successful_projects}")
    if len(failed_groups) > 0:
        cli_ui.info_1(cli_ui.red,
                      f"# of groups failed: {len(failed_groups)}",
                      cli_ui.reset)
        for group_number in failed_groups.keys():
            cli_ui.info_1(
                cli_ui.red,
                f"Failed group {group_number}: {failed_groups[group_number]}",
                cli_ui.reset,
            )
    if len(failed_projects) > 0:
        cli_ui.info_1(
            cli_ui.red,
            f"# of projects failed: {len(failed_projects)}",
            cli_ui.reset,
        )
        for project_number in failed_projects.keys():
            cli_ui.info_1(
                cli_ui.red,
                f"Failed project {project_number}: {failed_projects[project_number]}",
                cli_ui.reset,
            )
    if len(failed_groups) > 0 or len(failed_projects) > 0:
        sys.exit(EXIT_PROCESSING_ERROR)
    elif successful_groups > 0 or successful_projects > 0:
        shine = cli_ui.Symbol("✨", "!!!")
        cli_ui.info_1(
            cli_ui.green,
            "All requested groups/projects processed successfully!",
            cli_ui.reset,
            shine,
        )
def _process_configuration(self, project_and_group: str, configuration: dict):
    """Add, change, or delete configured files in the project's branches.

    For each configured file, resolves the target branches ("all",
    "protected", or an explicit list), temporarily unprotects protected
    branches while committing, and re-protects them afterwards.

    Fix: for an absolute `file:` path, `path` was assigned the file's
    *content* (`read_text()`) and then `read_text()` was called on that
    string again below, raising AttributeError. The path object is now
    kept and read exactly once.
    """
    for file in sorted(configuration["files"]):
        logging.debug("Processing file '%s'...", file)

        if configuration.get("files|" + file + "|skip"):
            logging.debug("Skipping file '%s'", file)
            continue

        all_branches = self.gitlab.get_branches(project_and_group)
        if configuration["files"][file]["branches"] == "all":
            branches = sorted(all_branches)
        elif configuration["files"][file]["branches"] == "protected":
            protected_branches = self.gitlab.get_protected_branches(
                project_and_group)
            branches = sorted(protected_branches)
        else:
            # explicit branch list - validate each entry against the project
            branches = []
            for branch in configuration["files"][file]["branches"]:
                if branch in all_branches:
                    branches.append(branch)
                else:
                    message = f"! Branch '{branch}' not found, not processing file '{file}' in it"
                    if self.strict:
                        cli_ui.error(message)
                        sys.exit(EXIT_INVALID_INPUT)
                    else:
                        cli_ui.warning(message)

        for branch in branches:
            cli_ui.debug(f"Processing file '{file}' in branch '{branch}'")

            # unprotect protected branch temporarily for operations below
            if configuration.get("branches|" + branch + "|protected"):
                logging.debug(
                    "> Temporarily unprotecting the branch for managing files in it..."
                )
                self.gitlab.unprotect_branch(project_and_group, branch)

            if configuration.get("files|" + file + "|delete"):
                try:
                    self.gitlab.get_file(project_and_group, branch, file)
                    logging.debug("Deleting file '%s' in branch '%s'", file,
                                  branch)
                    self.gitlab.delete_file(
                        project_and_group,
                        branch,
                        file,
                        self.get_commit_message_for_file_change(
                            "delete",
                            configuration.get("files|" + file + "|skip_ci"),
                        ),
                    )
                except NotFoundException:
                    logging.debug(
                        "Not deleting file '%s' in branch '%s' (already doesn't exist)",
                        file,
                        branch,
                    )
            else:
                # change or create file
                if configuration.get("files|" + file +
                                     "|content") and configuration.get(
                                         "files|" + file + "|file"):
                    cli_ui.error(
                        f"File '{file}' in '{project_and_group}' has both `content` and `file` set - "
                        "use only one of these keys.")
                    sys.exit(EXIT_INVALID_INPUT)
                elif configuration.get("files|" + file + "|content"):
                    new_content = configuration.get("files|" + file +
                                                    "|content")
                else:
                    path_in_config = Path(
                        configuration.get("files|" + file + "|file"))
                    if path_in_config.is_absolute():
                        path = path_in_config
                    else:
                        # relative paths are relative to config file location
                        path = Path(
                            os.path.join(self.config.config_dir,
                                         str(path_in_config)))
                    new_content = path.read_text()

                if configuration.get("files|" + file + "|template", True):
                    new_content = self.get_file_content_as_template(
                        new_content,
                        project_and_group,
                        **configuration.get("files|" + file + "|jinja_env",
                                            dict()),
                    )

                try:
                    current_content = self.gitlab.get_file(
                        project_and_group, branch, file)
                    if current_content != new_content:
                        if configuration.get("files|" + file + "|overwrite"):
                            logging.debug(
                                "Changing file '%s' in branch '%s'", file,
                                branch)
                            self.gitlab.set_file(
                                project_and_group,
                                branch,
                                file,
                                new_content,
                                self.get_commit_message_for_file_change(
                                    "change",
                                    configuration.get("files|" + file +
                                                      "|skip_ci"),
                                ),
                            )
                        else:
                            logging.debug(
                                "Not changing file '%s' in branch '%s' "
                                "(overwrite flag not set)",
                                file,
                                branch,
                            )
                    else:
                        logging.debug(
                            "Not changing file '%s' in branch '%s' (it's content is already"
                            " as provided)",
                            file,
                            branch,
                        )
                except NotFoundException:
                    logging.debug("Creating file '%s' in branch '%s'", file,
                                  branch)
                    self.gitlab.add_file(
                        project_and_group,
                        branch,
                        file,
                        new_content,
                        self.get_commit_message_for_file_change(
                            "add",
                            configuration.get("files|" + file + "|skip_ci")),
                    )

            # protect branch back after above operations
            if configuration.get("branches|" + branch + "|protected"):
                logging.debug("> Protecting the branch again.")
                self.branch_protector.protect_branch(project_and_group,
                                                     configuration, branch)

            if configuration.get("files|" + file + "|only_first_branch"):
                cli_ui.debug(
                    "Skipping other branches for this file, as configured."
                )
                break
def protect_branch(self, project_and_group, configuration, branch):
    """Apply the configured protection settings to a single branch.

    Dispatches between the legacy ("old") and current ("new") protected
    branches API based on the detected config type. For the new API,
    `user:` entries inside the allowed_to_push/allowed_to_merge extra
    params are translated to `user_id` before the call. In strict mode a
    missing branch is fatal; otherwise only a warning is printed.
    """
    try:
        requested_configuration = configuration["branches"][branch]
        config_type = self.get_branch_protection_config_type(
            project_and_group, requested_configuration, branch
        )
        if config_type == "old":
            self.protect_using_old_api(
                requested_configuration, project_and_group, branch
            )
        elif config_type == "new":
            # when configuration contains at least one of allowed_to_push and allowed_to_merge
            if any(
                extra_key in requested_configuration
                for extra_key in self.extra_param_keys
            ):
                for extra_param_key in self.extra_param_keys:
                    # check if an extra_param is in config and it contain user parameter
                    if extra_param_key in requested_configuration and any(
                        "user" in d
                        for d in requested_configuration[extra_param_key]
                    ):
                        for extra_config in requested_configuration[
                            extra_param_key
                        ]:
                            # loop over the array of extra param and get the user_id related to user
                            if "user" in extra_config.keys():
                                # NOTE: pops `user` and replaces it with `user_id`,
                                # mutating the passed-in configuration
                                user_id = self.gitlab._get_user_id(
                                    extra_config.pop("user")
                                )
                                extra_config["user_id"] = user_id
            if self.configuration_update_needed(
                requested_configuration, project_and_group, branch
            ):
                self.protect_using_new_api(
                    requested_configuration, project_and_group, branch
                )
            else:
                debug(
                    "Skipping setting branch '%s' protection configuration because it's already as requested.",
                    branch,
                )
        if "code_owner_approval_required" in requested_configuration:
            self.set_code_owner_approval_required(
                requested_configuration, project_and_group, branch
            )
    except NotFoundException:
        message = f"Branch '{branch}' not found when trying to set it as protected/unprotected!"
        if self.strict:
            fatal(
                message,
                exit_code=EXIT_PROCESSING_ERROR,
            )
        else:
            warning(message)
os.mkdir(sc_path_reports) cli_ui.info_3("Directory ", sc_path_reports, " Created ") except FileExistsError: cli_ui.info_3("Directory ", sc_path_reports, " already exists") ts = calendar.timegm(time.gmtime()) verifyJS(results.url, ts) print('\n' * 2) cli_ui.info_section("RESULTADO") cli_ui.info_1("URL: " + str(results.url)) cli_ui.info_1("ARCHIVOS ENCONTRADOS EN VIRUS TOTAL: " + str(vt_count)) if (vt_count > 0): print('\n' * 2) cli_ui.info_section("DETALLE") cli_ui.info_table(data_final, headers=headers) if (vt_upload_count > 0): print('\n' * 2) cli_ui.info_section("DETALLE ARCHIVOS ENVIADOS A VIRUS TOTAL") cli_ui.info( cli_ui.darkblue, "NOTA: ESTOS ARCHIVOS HAN SIDO ENVIADOS A ANALIZAR A VIRUS TOTAL." ) cli_ui.info_table(data_files_vt, headers=headers_vt_files) else: cli_ui.warning( 'No option was selected. To check CLI options, run script in help mode: \'{} -h\'' .format(__file__))
def _process_configuration(self, project_and_group: str, configuration: dict):
    """Add, change, or delete configured files in the project's branches.

    For each configured file, resolves the target branches ("all",
    "protected", or an explicit list) and delegates the commit to
    `modify_file_dealing_with_branch_protection`, which handles temporary
    unprotection of protected branches.

    Fix (was flagged by a TODO): for an absolute `file:` path, `path` was
    assigned the file's *content* (`read_text()`) and then `read_text()`
    was called on that string again below, raising AttributeError. The
    path object is now kept and read exactly once.
    """
    for file in sorted(configuration["files"]):
        debug("Processing file '%s'...", file)

        if configuration.get("files|" + file + "|skip"):
            debug("Skipping file '%s'", file)
            continue

        if configuration["files"][file]["branches"] == "all":
            all_branches = self.gitlab.get_branches(project_and_group)
            branches = sorted(all_branches)
        elif configuration["files"][file]["branches"] == "protected":
            protected_branches = self.gitlab.get_protected_branches(
                project_and_group
            )
            branches = sorted(protected_branches)
        else:
            # explicit branch list - validate each entry against the project
            all_branches = self.gitlab.get_branches(project_and_group)
            branches = []
            for branch in configuration["files"][file]["branches"]:
                if branch in all_branches:
                    branches.append(branch)
                else:
                    message = f"! Branch '{branch}' not found, not processing file '{file}' in it"
                    if self.strict:
                        fatal(
                            message,
                            exit_code=EXIT_INVALID_INPUT,
                        )
                    else:
                        warning(message)

        for branch in branches:
            verbose(f"Processing file '{file}' in branch '{branch}'")

            if configuration.get(
                "files|" + file + "|content"
            ) and configuration.get("files|" + file + "|file"):
                fatal(
                    f"File '{file}' in '{project_and_group}' has both `content` and `file` set - "
                    "use only one of these keys.",
                    exit_code=EXIT_INVALID_INPUT,
                )

            if configuration.get("files|" + file + "|delete"):
                try:
                    self.gitlab.get_file(project_and_group, branch, file)
                    debug("Deleting file '%s' in branch '%s'", file, branch)
                    self.modify_file_dealing_with_branch_protection(
                        project_and_group,
                        branch,
                        file,
                        "delete",
                        configuration,
                    )
                except NotFoundException:
                    debug(
                        "Not deleting file '%s' in branch '%s' (already doesn't exist)",
                        file,
                        branch,
                    )
            else:
                # change or create file
                if configuration.get("files|" + file + "|content"):
                    new_content = configuration.get("files|" + file + "|content")
                else:
                    path_in_config = Path(
                        configuration.get("files|" + file + "|file")
                    )
                    if path_in_config.is_absolute():
                        path = path_in_config
                    else:
                        # relative paths are relative to config file location
                        path = Path(
                            os.path.join(
                                self.config.config_dir, str(path_in_config)
                            )
                        )
                    new_content = path.read_text()

                if configuration.get("files|" + file + "|template", True):
                    new_content = self.get_file_content_as_template(
                        new_content,
                        project_and_group,
                        **configuration.get("files|" + file + "|jinja_env", dict()),
                    )

                try:
                    current_content = self.gitlab.get_file(
                        project_and_group, branch, file
                    )
                    if current_content != new_content:
                        if configuration.get("files|" + file + "|overwrite"):
                            debug("Changing file '%s' in branch '%s'", file, branch)
                            self.modify_file_dealing_with_branch_protection(
                                project_and_group,
                                branch,
                                file,
                                "modify",
                                configuration,
                                new_content,
                            )
                        else:
                            debug(
                                "Not changing file '%s' in branch '%s' - overwrite flag not set.",
                                file,
                                branch,
                            )
                    else:
                        debug(
                            "Not changing file '%s' in branch '%s' - it's content is already"
                            " as provided)",
                            file,
                            branch,
                        )
                except NotFoundException:
                    debug("Creating file '%s' in branch '%s'", file, branch)
                    self.modify_file_dealing_with_branch_protection(
                        project_and_group,
                        branch,
                        file,
                        "add",
                        configuration,
                        new_content,
                    )

            if configuration.get("files|" + file + "|only_first_branch"):
                verbose("Skipping other branches for this file, as configured.")
                break