def on_failure(self, *, num_errors: int) -> None:
    """Print an error summarizing how many repositories failed."""
    message = f"Command failed for {num_errors} repo(s)"
    ui.error(message)
def die(message: str) -> None:
    """Print *message* as an error, show the foreach epilog, and exit(1)."""
    ui.error(message)
    # Epilog already ends with a newline, hence end="".
    print(foreach_parser.epilog, end="")
    sys.exit(1)
def print_error(self) -> None:
    """Explain that the operation was cancelled by the user."""
    ui.error("Cancelled by user")
def print_error(self) -> None:
    """Tell the user no tbump configuration was found and how to create one."""
    # BUGFIX: the message misspelled "found" as "fond".
    ui.error("No configuration for tbump found in", self.project_path)
    ui.info("Please run `tbump init` to create a tbump.toml file")
    ui.info("Or add a [tool.tbump] section in the pyproject.toml file")
def on_failure(self, *, num_errors: int) -> None:
    """Print an error summarizing how many repositories failed."""
    # CONSISTENCY: the sibling on_failure implementation uses an f-string
    # for this exact message; the output is byte-identical.
    ui.error(f"Command failed for {num_errors} repo(s)")
def verifyJS(url, dirname):
    """Download every external <script src=...> referenced by *url*, hash each
    file and check it against VirusTotal, appending hits to the global report
    tables (data_final / data_files_vt) and counting positives in vt_count.

    NOTE(review): TLS certificate verification is globally disabled below --
    inherited from the original code; confirm this is intentional.
    """
    import subprocess  # local import: used for the shell-free wget call below

    count_items = 0
    global vt_count
    ssl._create_default_https_context = ssl._create_unverified_context
    r = requests.get(url)
    soup = BeautifulSoup(r.content, features="lxml")
    try:
        os.mkdir(sc_path_reports + "/" + str(dirname))
        cli_ui.info_3("Directory ", dirname, " Created ")
    except FileExistsError:
        cli_ui.info_3("Directory ", dirname, " already exists")
    lista = [i.get('src') for i in soup.find_all('script') if i.get('src')]
    for item in lista:
        item = clearJS(item)
        myfile = filename(item)
        newURL = clearURL(item)  # strips a leading // from the raw src URL
        if newURL.startswith('/'):
            newURL = generateUrl(url) + newURL
        try:
            pathfile = sc_path_reports + "/" + str(dirname) + "/" + str(myfile)
            # SECURITY FIX: the original built a shell command with string
            # concatenation (os.system + wget), allowing shell injection via
            # attacker-controlled script URLs / file names. An argument list
            # with subprocess.run never spawns a shell. This also passes the
            # user agent correctly (the old "-U='...'" form made wget see a
            # leading '=' in the agent string).
            subprocess.run(
                ["wget", "-q", "-U", vt_user_agent, "-O", pathfile, newURL],
                check=False,
            )
            sha256 = sha256sum(pathfile)
            result = vTotalQuery(sha256)
            if result != 'ERROR':
                if result['response_code'] == 1:
                    # Hash already known to VirusTotal: record positives, if any.
                    permalink = result['permalink']
                    total = result['total']
                    positives = result['positives']
                    if positives > 0:
                        vt_count += 1
                        data = [[(cli_ui.blue, str(myfile)),
                                 (cli_ui.blue, str(sha256)),
                                 (cli_ui.blue, str(positives) + "/" + str(total)),
                                 (cli_ui.blue, str(permalink))]]
                        data_final.extend(data)
                else:
                    # Unknown hash: upload the file for analysis.
                    params = {"apikey": vt_apikey, "resource": str(sha256)}
                    time.sleep(int(vt_sleep_post_file))
                    # FIX: close the uploaded file afterwards (the original
                    # leaked the handle). The original also built a 'headers'
                    # dict that was never passed to requests.post; it has been
                    # dropped as dead code.
                    with open(pathfile, 'rb') as fh:
                        files = {'file': (pathfile, fh)}
                        response = requests.post(vt_url_post_file,
                                                 files=files, params=params)
                    json_response = response.json()
                    data = [[(cli_ui.blue, str(myfile)),
                             (cli_ui.blue, str(sha256)),
                             (cli_ui.blue, str(json_response['permalink']))]]
                    data_files_vt.extend(data)
            else:
                cli_ui.error("ERROR AL CONSULTAR, VALIDAR API")
            count_items += 1
            cli_ui.info_progress("Done", count_items, len(lista))
        except Exception as e:
            cli_ui.error(str(e))
def process_all(self, projects_and_groups, groups):
    """Process all configured groups, then all projects, and print a summary.

    Groups/projects before self.start_from_group / self.start_from are
    skipped, as are those with an empty effective config. On a processing
    error either terminates immediately (self.terminate_after_error) or
    records the failure and continues; exits with EXIT_PROCESSING_ERROR at
    the end if anything failed.
    """
    group_number = 0
    successful_groups = 0
    failed_groups = {}
    maybe_output_file = self.try_to_get_output_file()

    for group in groups:
        group_number += 1

        if group_number < self.start_from_group:
            info_group_count(
                "@",
                group_number,
                len(groups),
                cli_ui.yellow,
                f"Skipping group {group} as requested to start from {self.start_from_group}...",
                cli_ui.reset,
            )
            continue

        configuration = self.configuration.get_effective_config_for_group(group)
        if configuration:
            info_group_count("@", group_number, len(groups),
                             f"Processing group: {group}")
            self.try_to_write_header_to_output_file(group, maybe_output_file)
            try:
                self.group_processors.process_group(
                    group,
                    configuration,
                    dry_run=self.noop,
                    output_file=maybe_output_file,
                )
                successful_groups += 1
            except Exception as e:
                failed_groups[group_number] = group
                trace = traceback.format_exc()
                message = f"Error occurred while processing group {group}, exception:\n\n{e}\n\n{trace}"
                if self.terminate_after_error:
                    # Close the output file before bailing out.
                    self.try_to_close_output_file(maybe_output_file)
                    cli_ui.error(message)
                    sys.exit(EXIT_PROCESSING_ERROR)
                else:
                    cli_ui.warning(message)
            finally:
                logging.debug(
                    f"@ ({group_number}/{len(groups)}) FINISHED Processing group: {group}"
                )
        else:
            self.try_to_write_header_to_output_file(group, maybe_output_file,
                                                    empty_config=True)
            info_group_count(
                "@",
                group_number,
                len(groups),
                cli_ui.yellow,
                f"Skipping group {group} as it has empty effective config.",
                cli_ui.reset,
            )

    project_number = 0
    successful_projects = 0
    failed_projects = {}

    for project_and_group in projects_and_groups:
        project_number += 1

        if project_number < self.start_from:
            info_project_count(
                "*",
                project_number,
                len(projects_and_groups),
                cli_ui.yellow,
                f"Skipping project {project_and_group} as requested to start from {self.start_from}...",
                cli_ui.reset,
            )
            continue

        configuration = self.configuration.get_effective_config_for_project(
            project_and_group)
        if configuration:
            info_project_count(
                "*",
                project_number,
                len(projects_and_groups),
                f"Processing project: {project_and_group}",
            )
            self.try_to_write_header_to_output_file(project_and_group,
                                                    maybe_output_file)
            try:
                self.project_processors.process_project(
                    project_and_group,
                    configuration,
                    dry_run=self.noop,
                    output_file=maybe_output_file,
                )
                successful_projects += 1
            except Exception as e:
                failed_projects[project_number] = project_and_group
                trace = traceback.format_exc()
                message = f"Error occurred while processing project {project_and_group}, exception:\n\n{e}\n\n{trace}"
                if self.terminate_after_error:
                    self.try_to_close_output_file(maybe_output_file)
                    cli_ui.error(message)
                    sys.exit(EXIT_PROCESSING_ERROR)
                else:
                    cli_ui.warning(message)
            finally:
                logging.debug(
                    f"* ({project_number}/{len(projects_and_groups)}) FINISHED Processing project: {project_and_group}",
                )
        else:
            self.try_to_write_header_to_output_file(project_and_group,
                                                    maybe_output_file,
                                                    empty_config=True)
            info_project_count(
                "*",
                project_number,
                len(projects_and_groups),
                cli_ui.yellow,
                f"Skipping project {project_and_group} as it has empty effective config.",
                cli_ui.reset,
            )

    self.try_to_close_output_file(maybe_output_file)

    cli_ui.info_1(f"# of groups processed successfully: {successful_groups}")
    cli_ui.info_1(f"# of projects processed successfully: {successful_projects}")

    if len(failed_groups) > 0:
        cli_ui.info_1(cli_ui.red, f"# of groups failed: {len(failed_groups)}",
                      cli_ui.reset)
        for group_number in failed_groups.keys():
            cli_ui.info_1(
                cli_ui.red,
                f"Failed group {group_number}: {failed_groups[group_number]}",
                cli_ui.reset,
            )
    if len(failed_projects) > 0:
        cli_ui.info_1(
            cli_ui.red,
            f"# of projects failed: {len(failed_projects)}",
            cli_ui.reset,
        )
        for project_number in failed_projects.keys():
            cli_ui.info_1(
                cli_ui.red,
                f"Failed project {project_number}: {failed_projects[project_number]}",
                cli_ui.reset,
            )

    if len(failed_groups) > 0 or len(failed_projects) > 0:
        sys.exit(EXIT_PROCESSING_ERROR)
    elif successful_groups > 0 or successful_projects > 0:
        shine = cli_ui.Symbol("✨", "!!!")
        cli_ui.info_1(
            cli_ui.green,
            # BUGFIX: the message said "processes successfully" and carried a
            # pointless f-string prefix with no placeholders.
            "All requested groups/projects processed successfully!",
            cli_ui.reset,
            shine,
        )
def print_error(self) -> None:
    """Explain that the current branch has no upstream to push to."""
    branch_part = "Current branch (%s)" % self.branch
    ui.error(branch_part, "does not track anything. Cannot push.")
def _process_configuration(self, project_and_group: str, configuration: dict):
    """Create, update or delete files in the project's branches as configured.

    For each configured file, resolves the target branches ("all",
    "protected" or an explicit list), temporarily unprotects protected
    branches, applies the delete / change / create operation, then protects
    the branch again. Exits with EXIT_INVALID_INPUT on invalid config.
    """
    for file in sorted(configuration["files"]):
        logging.debug("Processing file '%s'...", file)

        if configuration.get("files|" + file + "|skip"):
            logging.debug("Skipping file '%s'", file)
            continue

        all_branches = self.gitlab.get_branches(project_and_group)
        if configuration["files"][file]["branches"] == "all":
            branches = sorted(all_branches)
        elif configuration["files"][file]["branches"] == "protected":
            protected_branches = self.gitlab.get_protected_branches(
                project_and_group)
            branches = sorted(protected_branches)
        else:
            branches = []
            for branch in configuration["files"][file]["branches"]:
                if branch in all_branches:
                    branches.append(branch)
                else:
                    message = f"! Branch '{branch}' not found, not processing file '{file}' in it"
                    if self.strict:
                        cli_ui.error(message)
                        sys.exit(EXIT_INVALID_INPUT)
                    else:
                        cli_ui.warning(message)

        for branch in branches:
            cli_ui.debug(f"Processing file '{file}' in branch '{branch}'")

            # unprotect protected branch temporarily for operations below
            if configuration.get("branches|" + branch + "|protected"):
                logging.debug(
                    "> Temporarily unprotecting the branch for managing files in it..."
                )
                self.gitlab.unprotect_branch(project_and_group, branch)

            if configuration.get("files|" + file + "|delete"):
                try:
                    self.gitlab.get_file(project_and_group, branch, file)
                    logging.debug("Deleting file '%s' in branch '%s'", file,
                                  branch)
                    self.gitlab.delete_file(
                        project_and_group,
                        branch,
                        file,
                        self.get_commit_message_for_file_change(
                            "delete",
                            configuration.get("files|" + file + "|skip_ci"),
                        ),
                    )
                except NotFoundException:
                    logging.debug(
                        "Not deleting file '%s' in branch '%s' (already doesn't exist)",
                        file,
                        branch,
                    )
            else:  # change or create file
                if configuration.get("files|" + file +
                                     "|content") and configuration.get(
                                         "files|" + file + "|file"):
                    cli_ui.error(
                        f"File '{file}' in '{project_and_group}' has both `content` and `file` set - "
                        "use only one of these keys.")
                    sys.exit(EXIT_INVALID_INPUT)
                elif configuration.get("files|" + file + "|content"):
                    new_content = configuration.get("files|" + file + "|content")
                else:
                    path_in_config = Path(
                        configuration.get("files|" + file + "|file"))
                    if path_in_config.is_absolute():
                        # BUGFIX: the original did
                        # `path = path_in_config.read_text()` here, which
                        # stored the file's *content* (a str) in `path` and
                        # then crashed on `path.read_text()` below.
                        path = path_in_config
                    else:
                        # relative paths are relative to config file location
                        path = Path(
                            os.path.join(self.config.config_dir,
                                         str(path_in_config)))
                    new_content = path.read_text()

                if configuration.get("files|" + file + "|template", True):
                    new_content = self.get_file_content_as_template(
                        new_content,
                        project_and_group,
                        **configuration.get("files|" + file + "|jinja_env",
                                            dict()),
                    )

                try:
                    current_content = self.gitlab.get_file(
                        project_and_group, branch, file)
                    if current_content != new_content:
                        if configuration.get("files|" + file + "|overwrite"):
                            logging.debug("Changing file '%s' in branch '%s'",
                                          file, branch)
                            self.gitlab.set_file(
                                project_and_group,
                                branch,
                                file,
                                new_content,
                                self.get_commit_message_for_file_change(
                                    "change",
                                    configuration.get("files|" + file +
                                                      "|skip_ci"),
                                ),
                            )
                        else:
                            logging.debug(
                                "Not changing file '%s' in branch '%s' "
                                "(overwrite flag not set)",
                                file,
                                branch,
                            )
                    else:
                        # FIX: message grammar ("it's" -> "its").
                        logging.debug(
                            "Not changing file '%s' in branch '%s' (its content is already"
                            " as provided)",
                            file,
                            branch,
                        )
                except NotFoundException:
                    logging.debug("Creating file '%s' in branch '%s'", file,
                                  branch)
                    self.gitlab.add_file(
                        project_and_group,
                        branch,
                        file,
                        new_content,
                        self.get_commit_message_for_file_change(
                            "add",
                            configuration.get("files|" + file + "|skip_ci")),
                    )

            # protect branch back after above operations
            if configuration.get("branches|" + branch + "|protected"):
                logging.debug("> Protecting the branch again.")
                self.branch_protector.protect_branch(project_and_group,
                                                     configuration, branch)

            if configuration.get("files|" + file + "|only_first_branch"):
                cli_ui.debug(
                    "Skipping other branches for this file, as configured.")
                break
def die(message: str) -> None:
    """Print *message* as an error, show the epilog, and exit with status 1."""
    ui.error(message)
    # EPILOG already ends with a newline, hence end="".
    print(EPILOG, end="")
    sys.exit(1)
def _process_configuration(self, project_and_group: str, configuration: dict):
    """Synchronize the project's shared groups and direct members with config.

    Groups/users whose access level or expiry differs from the configured
    values are removed and re-added to force the expected access level.
    Exits with EXIT_INVALID_INPUT when neither 'users' nor 'groups' is set.
    """
    groups = configuration.get("members|groups")
    if groups:
        current_groups = self.gitlab.get_groups_from_project(project_and_group)
        for group in groups:
            expires_at = (groups[group]["expires_at"].strftime("%Y-%m-%d")
                          if "expires_at" in groups[group] else None)
            access_level = (groups[group]["group_access"]
                            if "group_access" in groups[group] else None)

            # we only add the group if it doesn't have the correct settings
            if (group in current_groups
                    and expires_at == current_groups[group]["expires_at"]
                    and access_level
                    == current_groups[group]["group_access_level"]):
                logging.info("Ignoring group '%s' as it is already a member",
                             group)
                # CONSISTENCY FIX: use lazy %-args like the other logging
                # calls instead of eagerly formatting with the % operator.
                logging.info("Current settings for '%s' are: %s", group,
                             current_groups[group])
            else:
                logging.debug("Setting group '%s' as a member", group)
                # we will remove group access first and then re-add them,
                # to ensure that the groups have the expected access level
                self.gitlab.unshare_with_group(project_and_group, group)
                self.gitlab.share_with_group(project_and_group, group,
                                             access_level, expires_at)

    users = configuration.get("members|users")
    if users:
        current_members = self.gitlab.get_members_from_project(
            project_and_group)
        for user in users:
            expires_at = (users[user]["expires_at"].strftime("%Y-%m-%d")
                          if "expires_at" in users[user] else None)
            access_level = (users[user]["access_level"]
                            if "access_level" in users[user] else None)

            # we only add the user if it doesn't have the correct settings
            if (user in current_members
                    and expires_at == current_members[user]["expires_at"]
                    and access_level == current_members[user]["access_level"]):
                logging.info("Ignoring user '%s' as it is already a member",
                             user)
                logging.info("Current settings for '%s' are: %s", user,
                             current_members[user])
            else:
                logging.info("Setting user '%s' as a member", user)
                # remove-then-add forces the expected access level
                # (the redundant access/expiry aliases were dropped).
                self.gitlab.remove_member_from_project(project_and_group, user)
                self.gitlab.add_member_to_project(project_and_group, user,
                                                  access_level, expires_at)

    if not groups and not users:
        cli_ui.error("Project members configuration section has to contain"
                     " either 'users' or 'groups' non-empty keys.")
        sys.exit(EXIT_INVALID_INPUT)
def test_quiet(message_recorder: MessageRecorder) -> None:
    """With quiet=True, errors are still emitted but plain info is suppressed."""
    cli_ui.setup(quiet=True)
    cli_ui.info("info")
    cli_ui.error("error")
    assert message_recorder.find("error")
    assert not message_recorder.find("info")
def print_error(self) -> None:
    """Report the command and its non-zero return code."""
    quoted_cmd = "`%s`" % self.cmd
    ui.error(ui.reset, quoted_cmd, "exited with return code", self.rc)
def print_error(self) -> None:
    """Show that the repository has uncommitted changes plus the git status."""
    ui.error("Repository is dirty")
    ui.info(self.git_status_output)
def on_failure(self, *, num_errors: int) -> None:
    """Report that workspace synchronization failed."""
    ui.error("Failed to synchronize workspace")
def print_error(self) -> None:
    """Explain that HEAD is detached (not on any branch)."""
    ui.error("Not on any branch")
def print_error(self) -> None:
    """Report that the stored version string could not be parsed."""
    ui.error("Could not parse", self.version, "as a valid version string")
def print_error(self) -> None:
    """Report that the git ref to be created already exists."""
    ui.error("git ref", self.ref, "already exists")
def print_error(self) -> None:
    """Report that the source path does not exist."""
    ui.error(self.src, "does not exist")
def on_failure(self, *, num_errors: int) -> None:
    """Report that cloning the missing repositories failed."""
    ui.error("Failed to clone missing repos")
def print_error(self) -> None:
    """Report that the configured current version was not found in the file."""
    formatted = "Current version string: (%s)" % self.current_version_string
    ui.error(formatted, "not found in", self.src)
def bump(options: BumpOptions) -> None:
    """Run the full version bump described by *options*.

    Checks git state first (dirty tree and, unless only_patch, branch state),
    then patches the configured files via an Executor; hooks and git
    commit/tag actions are added only when only_patch is not set. In
    interactive mode the plan is printed and confirmed. In dry-run mode
    nothing is executed: a deferred git-state error, if any, is reported and
    the process exits with status 1, otherwise the function returns early.
    """
    working_path = options.working_path
    new_version = options.new_version
    interactive = options.interactive
    only_patch = options.only_patch
    dry_run = options.dry_run
    config = parse_config(options.working_path)
    # fmt: off
    ui.info_1(
        "Bumping from", ui.bold, config.current_version,
        ui.reset, "to", ui.bold, new_version,
    )
    # fmt: on
    git_bumper = GitBumper(working_path)
    git_bumper.set_config(config)
    git_state_error = None
    try:
        git_bumper.check_dirty()  # Avoid data loss
        # Branch-state checks are skipped when only patching files.
        if not only_patch:
            git_bumper.check_branch_state(new_version)
    except tbump.git.GitError as e:
        # In dry-run mode, remember the error and keep going so the user
        # still sees what would have been done; otherwise fail immediately.
        if dry_run:
            git_state_error = e
        else:
            raise
    file_bumper = FileBumper(working_path)
    file_bumper.set_config(config)
    hooks_runner = HooksRunner(working_path, config.current_version)
    if not only_patch:
        for hook in config.hooks:
            hooks_runner.add_hook(hook)
    executor = Executor(new_version, file_bumper)
    if not only_patch:
        executor.add_git_and_hook_actions(new_version, git_bumper, hooks_runner)
    if interactive:
        # Show the planned actions; ask for confirmation unless dry-running.
        executor.print_self(dry_run=True)
        if not dry_run:
            proceed = ui.ask_yes_no("Looking good?", default=False)
            if not proceed:
                raise Cancelled()
    if dry_run:
        # Surface the git-state error deferred above, if any.
        if git_state_error:
            ui.error("Git repository state is invalid")
            git_state_error.print_error()
            sys.exit(1)
        else:
            return
    executor.print_self(dry_run=False)
    executor.run()
    if config.github_url:
        tag_name = git_bumper.get_tag_name(new_version)
        suggest_creating_github_release(config.github_url, tag_name)
def _process_configuration(self, group: str, configuration: dict):
    """Synchronize the members of *group* with the configured 'group_members'.

    Members are (re-)added from Owners (50) down to Guests (10) so the group
    never ends up without an Owner, and -- when 'enforce_group_members' is
    set -- direct members absent from the config are removed. Exits with
    EXIT_INVALID_INPUT on invalid configuration.
    """
    users_to_set_by_username = configuration.get("group_members")
    if users_to_set_by_username:
        # group users before by username
        users_before = self.gitlab.get_group_members(group)
        logging.debug("Group members BEFORE: %s", users_before)
        users_before_by_username = dict()
        for user in users_before:
            users_before_by_username[user["username"]] = user
        # group users to set by access level
        users_to_set_by_access_level = dict()
        for user in users_to_set_by_username:
            access_level = users_to_set_by_username[user]["access_level"]
            users_to_set_by_access_level.setdefault(access_level,
                                                    []).append(user)
        # check if the configured users contain at least one Owner
        if 50 not in users_to_set_by_access_level.keys(
        ) and configuration.get("enforce_group_members"):
            cli_ui.error(
                "With 'enforce_group_members' flag you cannot have no Owners (access_level = 50) in your "
                " group members config. GitLab requires at least 1 Owner per group."
            )
            sys.exit(EXIT_INVALID_INPUT)
        # we HAVE TO start configuring access from Owners to prevent case when there is no Owner
        # in a group
        for level in [50, 40, 30, 20, 10]:
            users_to_set_with_this_level = (
                users_to_set_by_access_level[level]
                if level in users_to_set_by_access_level else [])
            for user in users_to_set_with_this_level:
                access_level_to_set = users_to_set_by_username[user][
                    "access_level"]
                expires_at_to_set = (
                    users_to_set_by_username[user]["expires_at"]
                    if "expires_at" in users_to_set_by_username[user] else
                    None)
                if user in users_before_by_username:
                    access_level_before = users_before_by_username[user][
                        "access_level"]
                    expires_at_before = users_before_by_username[user][
                        "expires_at"]
                    if (access_level_before == access_level_to_set
                            and expires_at_before == expires_at_to_set):
                        logging.debug(
                            "Nothing to change for user '%s' - same config now as to set.",
                            user,
                        )
                    else:
                        logging.debug(
                            "Re-adding user '%s' to change their access level or expires at.",
                            user,
                        )
                        # we will remove the user first and then re-add them,
                        # to ensure that the user has the expected access level
                        self.gitlab.remove_member_from_group(group, user)
                        self.gitlab.add_member_to_group(
                            group, user, access_level_to_set,
                            expires_at_to_set)
                else:
                    logging.debug(
                        "Adding user '%s' who previously was not a member.",
                        user)
                    self.gitlab.add_member_to_group(
                        group, user, access_level_to_set, expires_at_to_set)
        if configuration.get("enforce_group_members"):
            # remove users not configured explicitly
            # note: only direct members are removed - inherited are left
            users_not_configured = set([
                user["username"] for user in users_before
            ]) - set(users_to_set_by_username.keys())
            for user in users_not_configured:
                logging.debug(
                    "Removing user '%s' who is not configured to be a member.",
                    user)
                self.gitlab.remove_member_from_group(group, user)
        else:
            logging.debug("Not enforcing group members.")
        logging.debug("Group members AFTER: %s",
                      self.gitlab.get_group_members(group))
    else:
        cli_ui.error(
            "You cannot configure a group to have no members. GitLab requires a group "
            " to contain at least 1 member who is an Owner (access_level = 50)."
        )
        sys.exit(EXIT_INVALID_INPUT)
def on_failure(self, *, num_errors: int) -> None:
    """Introduce the list of copy operations that failed."""
    ui.error("Failed to perform the following copies:")
def on_failure(self, *, num_errors: int) -> None:
    """Report that configuring the git remotes failed."""
    ui.error("Failed to configure remotes")
def print_error(self) -> None:
    """Print whichever of the stored errors (I/O and/or parse) is set."""
    if self.io_error:
        ui.error("Could not read config file:", self.io_error)
    if self.parse_error:
        ui.error("Invalid config:", self.parse_error)
def print_error(self) -> None:
    """Report the failed external command, joined into a single string."""
    joined = " ".join(self.cmd)
    ui.error("Command", "`%s`" % joined, "failed")