def update_tool_run(context: Context, tool: str, run: bool) -> None:
    """Sets run field of tool to RUN. Default to no ignore if tool not in config """
    # Reject tool ids that are not part of the known tool inventory.
    known_ids = {t.tool_id() for t in bento.extra.TOOLS}
    if tool not in known_ids:
        echo_error(
            f"No tool named '{tool}'. See help text for list of available tools."
        )
        sys.exit(3)

    config = context.config
    tool_config = config["tools"]
    if tool not in tool_config:
        # First time this tool is configured: seed its entry with the ignore
        # list shipped in the packaged default configuration (empty if the
        # tool has no entry there).
        default_config_path = os.path.join(
            os.path.dirname(__file__), "configs/default.yml"
        )
        with open(default_config_path) as template:
            defaults = yaml.safe_load(template)
        ignores: List[str] = (
            defaults["tools"][tool]["ignore"] if tool in defaults["tools"] else []
        )
        tool_config[tool] = {"ignore": ignores}

    tool_config[tool]["run"] = run
    context.config = config
def uninstall_autorun(context: Context) -> None:
    """
    Configures Bento to NOT run automatically on commits.

    Autorun is only removed for the project from which this command is run.
    """
    import git  # import inside def for performance

    # Locate this repository's pre-commit hook.
    repo = bento.git.repo(context.base_path)
    if repo is None:
        echo_error("Not a git project")
        sys.exit(3)
    hook_path = Path(git.index.fun.hook_path("pre-commit", repo.git_dir))

    if not _is_bento_precommit(hook_path):
        echo_warning(
            "Not uninstalling autorun: Bento is not configured for autorun on this project."
        )
        sys.exit(1)

    # Put back the legacy hook if one exists; otherwise just delete ours.
    legacy_hook_path = Path(f"{hook_path}.pre-bento")
    if legacy_hook_path.exists():
        shutil.move(legacy_hook_path, hook_path)
    else:
        hook_path.unlink()
    echo_success("Uninstalled Bento autorun.")
    echo_next_step("To enable autorun", "bento enable autorun")
def _identify_git(self) -> None:
    """Abort (exit code 3) unless the working directory is inside a Git repository."""
    if bento.git.repo(self.context.base_path) is None:
        echo_error(
            "Current directory is not part of a Git project. Bento only works for Git projects."
        )
        sys.exit(3)
def _abort_if_untracked_and_removed(self, removed: List[str]) -> None:
    """
    Raises UnsupportedGitStateException if any path is removed from the git index but also appears in the filesystem.

    :param removed (list): Removed paths
    :raises UnsupportedGitStateException: If any removed paths are present on Filesystem
    """
    # Escape spaces so the suggested shell commands are copy-pasteable.
    untracked_removed = [
        r.replace(" ", r"\ ") for r in removed if Path(r).exists()
    ]
    if untracked_removed:
        joined = " ".join(untracked_removed)

        def echo_cmd(cmd: str) -> None:
            # Render a suggested shell command, bolded, on stderr.
            click.echo(f"  $ {click.style(cmd, bold=True)}\n", err=True)

        echo_error(
            "One or more files deleted from git exist on the filesystem. Aborting to prevent data loss. To "
            "continue, please stash by running the following two commands:"
        )
        echo_newline()
        echo_cmd(f"git stash -u -- {joined}")
        echo_cmd(f"git rm {joined}")
        click.secho(
            "Stashed changes can later be recovered by running:\n",
            err=True,
            fg=Colors.ERROR,
        )
        # Fix: was an f-string with no placeholders (flake8 F541); same text.
        echo_cmd("git stash pop")
        raise UnsupportedGitStateException()
def _validate_shell() -> Tuple[Path, str]:
    """
    Gets the profile file and completion text for the current shell

    :raises SystemExit: If not in a valid shell
    """
    current_shell = os.environ.get("SHELL")
    logging.info(f"Using shell {current_shell}")
    if not current_shell:
        echo_error(
            f"This command must be executed within one of the {VALID} shells. (Currently not in a shell)."
        )
        sys.exit(1)

    # SHELL holds a full path such as /bin/zsh; only the basename matters.
    shell_name = current_shell.split("/")[-1]
    if shell_name not in SUPPORTED:
        echo_error(
            f"This command must be executed within one of the {VALID} shells. (Currently using {shell_name})."
        )
        sys.exit(1)

    profile_file, completion_text = SUPPORTED[shell_name]
    return Path.home() / profile_file, completion_text
def main() -> None:
    """CLI entry point: run the click group and translate known failures into exit code 3."""
    try:
        cli(auto_envvar_prefix="BENTO")
    except BentoException as e:
        # Known failure modes arrive as BentoException; print the message (if
        # any) and exit. Any other exception deliberately bubbles up with a
        # full traceback.
        if e.msg:
            echo_error(e.msg)
        sys.exit(3)
def _head_context(self) -> Iterator[None]:
    """
    Runs a block of code on files from the current branch HEAD.

    :raises subprocess.CalledProcessError: If git encounters an exception
    :raises NoGitHeadException: If git cannot detect a HEAD commit
    :raises UnsupportedGitStateException: If unmerged files are detected
    """
    repo = bento.git.repo()
    if not repo:
        # Not inside a git repository: run the wrapped block against the
        # working tree as-is.
        yield
        return
    commit = bento.git.commit()
    if commit is None:
        raise NoGitHeadException()
    else:
        added, removed, unmerged = self._git_status()
        # Need to look for unmerged files first, otherwise staged_files_only will eat them
        if unmerged:
            echo_error(
                "Please resolve merge conflicts in these files before continuing:"
            )
            for f in unmerged:
                click.secho(f, err=True)
            raise UnsupportedGitStateException()
        with staged_files_only(PATCH_CACHE):
            # Snapshot the current index as a tree object so it can be
            # restored in the finally block below.
            tree = cmd_output("git", "write-tree")[1].strip()
            self._abort_if_untracked_and_removed(removed)
            try:
                # Delete files added relative to HEAD, then check out HEAD's
                # version of everything else, so the block sees HEAD state.
                for a in added:
                    (repo.working_tree_dir / Path(a)).unlink()
                cmd_output("git", "checkout", "HEAD", "--", ".")
                yield
            finally:
                # git checkout will fail if the checked-out index deletes all files in the repo
                # In this case, we still want to continue without error.
                # Note that we have no good way of detecting this issue without inspecting the checkout output
                # message, which means we are fragile with respect to git version here.
                try:
                    cmd_output("git", "checkout", tree.strip(), "--", ".")
                except CalledProcessError as ex:
                    if (
                        ex.output
                        and len(ex.output) >= 2
                        and "pathspec '.' did not match any file(s) known to git"
                        in ex.output[1].strip()
                    ):
                        logging.warning(
                            "Restoring git index failed due to total repository deletion; skipping checkout"
                        )
                    else:
                        raise ex
                # Re-remove paths that were deleted in the index before the
                # block ran, so the restored state matches the original.
                if removed:
                    cmd_output("git", "rm", *removed)
def __log_exception(e: Exception) -> None:
    """Log an exception (with traceback) and surface a user-facing message for it."""
    logging.exception(e)
    if not isinstance(e, subprocess.CalledProcessError):
        echo_error(f"There was an exception {e}")
        return
    # Subprocess failures: show the command that failed plus its stderr, and
    # record both output streams in the log.
    cmd = e.cmd
    if isinstance(e.cmd, list):
        cmd = " ".join([str(part) for part in e.cmd])
    echo_warning(f'Could not execute "{cmd}":\n{e.stderr}')
    logging.error(e.stdout)
    logging.error(e.stderr)
def install_autorun(context: Context, block: bool) -> None:
    """
    Configures Bento to automatically run on commits.

    Autorun is configured only for you; it does not affect other contributors to this project.

    Autorun is only configured for the project from which this command is run.

    By default, Bento will block commits if it finds an issue. To prevent autorun from blocking commits, run:

    $ bento enable autorun --no-block
    """
    import git  # import inside def for performance

    # Get hook path
    repo = bento.git.repo(context.base_path)
    if repo is None:
        echo_error("Not a git project")
        sys.exit(3)
    # Record the blocking preference before touching the hook itself.
    _configure_block(context, block)
    hook_path = Path(git.index.fun.hook_path("pre-commit", repo.git_dir))
    if _is_bento_precommit(hook_path):
        # Bento's hook is already installed; nothing to copy.
        _notify_install(context, block)
    else:
        legacy_hook_path = Path(f"{hook_path}.pre-bento")
        if hook_path.exists():
            # If pre-commit hook already exists move it over
            if legacy_hook_path.exists():
                # A saved-aside hook is already present; refuse to clobber it.
                raise Exception(
                    f"Autorun could not be configured: A legacy pre-commit hook exists. Please remove {hook_path}.pre-bento to continue."
                )
            else:
                # Save the user's existing hook so uninstall can restore it.
                shutil.move(hook_path, legacy_hook_path)
        # Copy pre-commit script template to hook_path
        template_location = os.path.join(
            os.path.dirname(__file__), "../resources/pre-commit.template"
        )
        shutil.copyfile(template_location, hook_path)
        # Make file executable
        original_mode = hook_path.stat().st_mode
        os.chmod(hook_path, original_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
        _notify_install(context, block)
def update_ignores(context: Context, tool: str, update_func: Callable[[Set[str]], None]) -> None:
    """Apply update_func to TOOL's ignore set in place, then persist the config."""
    config = context.config
    tools_section = config["tools"]
    if tool not in tools_section:
        configured = ", ".join(f"'{name}'" for name in tools_section.keys())
        echo_error(f"No tool named '{tool}'. Configured tools are {configured}")
        sys.exit(3)
    # Let the callback mutate a set copy, then write it back as a list.
    ignore_set = set(tools_section[tool].get("ignore", []))
    update_func(ignore_set)
    tools_section[tool]["ignore"] = list(ignore_set)
    context.config = config
def _load_configured_tools(self) -> Dict[str, Tool]:
    """
    Returns list of this project's configured tools (disabled and enabled)
    """
    inventory = self.tool_inventory
    loaded: Dict[str, Tool] = {}
    for tool_name in self.config["tools"].keys():
        tool_cls = inventory.get(tool_name, None)
        if not tool_cls:
            # Config names a tool this Bento build doesn't know about.
            # TODO: Move to display layer
            echo_error(f"No tool named '{tool_name}' could be found")
            continue
        loaded[tool_name] = tool_cls(self)
    return loaded
def _load_enabled_tools(self) -> Dict[str, Tool]:
    """
    Returns a list of this project's enabled tools

    These are the tools in the configuration file that do not have "run" option set to False
    """
    inventory = self.tool_inventory
    enabled: Dict[str, Tool] = {}
    for name, cfg in self.config["tools"].items():
        # A missing "run" key means the tool is enabled by default.
        if not cfg.get("run", True):
            continue
        tool_cls = inventory.get(name, None)
        if not tool_cls:
            # TODO: Move to display layer
            echo_error(f"No tool named '{name}' could be found")
            continue
        enabled[name] = tool_cls(self)
    return enabled
def cli(ctx: click.Context, base_path: Optional[str], agree: bool, email: Optional[str]) -> None:
    # Top-level group callback: configure logging, build the Context object,
    # verify the interpreter / registration, and warn on outdated clients.
    __setup_logging()
    is_init = ctx.invoked_subcommand == "init"
    ctx.help_option_names = ["-h", "--help"]
    ctx.obj = (
        Context(is_init=is_init)
        if base_path is None
        else Context(base_path=base_path, is_init=is_init)
    )
    if not is_running_supported_python3():
        echo_error(
            "Bento requires Python 3.6+. Please ensure you have Python 3.6+ and installed Bento via `pip3 install bento-cli`."
        )
        sys.exit(3)
    registrar = register.Registrar(ctx.obj, agree, email=email)
    if not registrar.verify():
        logging.error("Could not verify the user's registration.")
        sys.exit(3)
    if not is_running_latest():
        logging.warning("Bento client is outdated")
        click.echo(constants.UPGRADE_WARNING_OUTPUT)
def check(
    context: Context,
    all_: bool = False,
    formatter: Tuple[str, ...] = (),
    pager: bool = True,
    tool: Optional[str] = None,
    staged_only: bool = False,  # Should not be used. Legacy support for old pre-commit hooks
    paths: Tuple[Path, ...] = (),
) -> None:
    """
    Checks for new findings.

    By default, only staged files are checked. New findings introduced by these staged changes AND that are not in the archive (`.bento/archive.json`) will be shown.

    Use `--all` to check all Git tracked files, not just those that are staged:

    $ bento check --all [PATHS]

    Optional PATHS can be specified to check specific directories or files.

    See `bento archive --help` to learn about suppressing findings.
    """
    # Fail out if not configured
    if not context.config_path.exists():
        raise NoConfigurationException()
    # Fail out if no .bentoignore
    if not context.ignore_file_path.exists():
        raise NoIgnoreFileException(context)
    # Default to no path filter
    if len(paths) < 1:
        path_list = [context.base_path]
    else:
        path_list = list(paths)
    # Handle specified tool that is not configured
    if tool and tool not in context.configured_tools:
        click.echo(
            f"{tool} has not been configured. Adding default configuration for tool to {bento.constants.CONFIG_FILE_NAME}"
        )
        update_tool_run(context, tool, False)
        # Set configured_tools to None so that future calls will
        # update and include newly added tool
        context._configured_tools = None
    # Handle specified formatters
    if formatter:
        context.config["formatter"] = [{f: {}} for f in formatter]
    if all_:
        click.echo(f"Running Bento checks on all tracked files...\n", err=True)
    else:
        click.echo(f"Running Bento checks on staged files...\n", err=True)
    # Restrict the run to a single tool when one was requested.
    tools: Iterable[Tool[Any]] = context.tools.values()
    if tool:
        tools = [context.configured_tools[tool]]
    # Load the archive baseline (previously archived finding hashes), if any.
    baseline: Baseline = {}
    if context.baseline_file_path.exists():
        with context.baseline_file_path.open() as json_file:
            baseline = bento.result.json_to_violation_hashes(json_file)
    target_file_manager = TargetFileManager(
        context.base_path, path_list, not all_, context.ignore_file_path
    )
    all_results, elapsed = bento.orchestrator.orchestrate(
        baseline, target_file_manager, not all_, tools
    )
    fmts = context.formatters
    findings_to_log: List[Any] = []
    n_all = 0  # total findings, including archived ones
    n_all_filtered = 0  # findings that passed the archive filter
    filtered_findings: Dict[str, List[Violation]] = {}
    for tool_id, findings in all_results:
        if isinstance(findings, Exception):
            # Tool execution failed; report and mark the run as errored via
            # context.error_on_exit (unless it is a BentoException).
            logging.error(findings)
            echo_error(f"Error while running {tool_id}: {findings}")
            if isinstance(findings, BentoException):
                click.secho(findings.msg, err=True)
            else:
                if isinstance(findings, subprocess.CalledProcessError):
                    click.secho(findings.stderr, err=True)
                    click.secho(findings.stdout, err=True)
                if isinstance(findings, NodeError):
                    echo_warning(
                        f"Node.js not found or version is not compatible with ESLint v6."
                    )
                click.secho(
                    f"""-------------------------------------------------------------------------------------------------
This may be due to a corrupted tool installation. You might be able to fix this issue by running:

bento init --clean

You can also view full details of this error in `{bento.constants.DEFAULT_LOG_PATH}`.
-------------------------------------------------------------------------------------------------
""",
                    err=True,
                )
                context.error_on_exit(ToolRunException())
        elif isinstance(findings, list) and findings:
            findings_to_log += bento.metrics.violations_to_metrics(
                tool_id,
                context.timestamp,
                findings,
                __get_ignores_for_tool(tool_id, context.config),
            )
            # Findings flagged `filtered` are archived; hide them from output.
            filtered = [f for f in findings if not f.filtered]
            filtered_findings[tool_id] = filtered
            n_all += len(findings)
            n_filtered = len(filtered)
            n_all_filtered += n_filtered
            logging.debug(f"{tool_id}: {n_filtered} findings passed filter")

    def post_metrics() -> None:
        # Fire-and-forget metrics upload on a background thread.
        bento.network.post_metrics(findings_to_log, is_finding=True)

    stats_thread = threading.Thread(name="stats", target=post_metrics)
    stats_thread.start()
    # Render findings through the configured formatters; pause the user timer
    # is NOT wanted here — the pager wait counts as user time.
    dumped = [f.dump(filtered_findings) for f in fmts]
    context.start_user_timer()
    bento.util.less(dumped, pager=pager, overrun_pages=OVERRUN_PAGES)
    context.stop_user_timer()
    finding_source_text = "in this project" if all_ else "due to staged changes"
    if n_all_filtered > 0:
        echo_warning(
            f"{n_all_filtered} finding(s) {finding_source_text} in {elapsed:.2f} s"
        )
        click.secho("\nPlease fix these issues, or:\n", err=True)
        echo_next_step("To archive findings as tech debt", f"bento archive")
        echo_next_step("To disable a specific check", f"bento disable check TOOL CHECK")
    else:
        echo_success(f"0 findings {finding_source_text} in {elapsed:.2f} s\n")
    n_archived = n_all - n_all_filtered
    if n_archived > 0:
        echo_next_step(
            f"Not showing {n_archived} archived finding(s). To view",
            "cat .bento/archive.json",
        )
    # Exit-code protocol: staged non-blocking runs always succeed; otherwise
    # re-raise a deferred tool error, or exit 2 when findings remain.
    if not all_ and not context.autorun_is_blocking:
        return
    elif context.on_exit_exception:
        raise context.on_exit_exception
    elif n_all_filtered > 0:
        sys.exit(2)
def archive(context: Context, show_bars: bool = True) -> None:
    """
    Adds all current findings to the whitelist.
    """
    if not context.is_init:
        click.secho("Running Bento archive...\n" "", err=True)
    if not context.config_path.exists():
        echo_error("No Bento configuration found. Please run `bento init`.")
        sys.exit(3)
    # Collect hashes of previously archived findings so we can report how
    # many current findings are new vs. already known.
    if context.baseline_file_path.exists():
        with context.baseline_file_path.open() as json_file:
            old_baseline = bento.result.yml_to_violation_hashes(json_file)
        old_hashes = {h for hh in old_baseline.values() for h in hh}
    else:
        old_hashes = set()
    new_baseline: List[str] = []
    tools = context.tools.values()
    # Run every tool with an empty baseline so ALL current findings surface.
    all_findings = bento.tool_runner.Runner(show_bars=show_bars).parallel_results(
        tools, {}, None
    )
    n_found = 0
    n_existing = 0
    found_hashes: Set[str] = set()
    if show_bars:
        echo_newline()
    for tool_id, vv in all_findings:
        if isinstance(vv, Exception):
            # A tool failure aborts the whole archive operation.
            raise vv
        n_found += len(vv)
        new_baseline += bento.result.tool_results_to_yml(tool_id, vv)
        for v in vv:
            h = v.syntactic_identifier_str()
            found_hashes.add(h)
            if h in old_hashes:
                n_existing += 1
    n_new = n_found - n_existing
    # Previously archived findings that no longer occur were fixed.
    n_removed = len(old_hashes - found_hashes)
    # Overwrite the baseline file with the freshly collected findings.
    context.baseline_file_path.parent.mkdir(exist_ok=True, parents=True)
    with context.baseline_file_path.open("w") as json_file:
        json_file.writelines(new_baseline)
    # Assemble the summary message: new / kept / removed counts.
    success_str = click.style(f"Project analyzed with {len(tools)} tool(s).", bold=True)
    success_str += (
        f"\n{n_new} finding(s) were archived, and will be hidden in future Bento runs."
    )
    if n_existing > 0:
        success_str += f"\nBento also kept {n_existing} existing findings"
        if n_removed > 0:
            success_str += f" and removed {n_removed} fixed findings."
        else:
            success_str += "."
    elif n_removed > 0:
        success_str += f"\nBento also removed {n_removed} fixed findings."
    click.echo(success_str, err=True)
    if not context.is_init:
        echo_newline()
        echo_next_step("To view archived results", "bento check --show-all")
    click.echo(
        f"\nPlease check '{context.pretty_path(context.baseline_file_path)}' in to source control.",
        err=True,
    )
def archive(context: Context, all_: bool, paths: Tuple[Path, ...]) -> None:
    """
    Suppress current findings.

    By default, only results introduced by currently staged changes will be added to the archive (`.bento/archive.json`).

    Archived findings will not appear in future `bento check` output and will not block commits if `autorun` is enabled.

    Use `--all` to archive findings in all Git tracked files, not just those that are staged:

    $ bento archive --all [PATHS]

    Optional PATHS can be specified to archive results from specific directories or files.

    Archived findings are viewable in `.bento/archive.json`.
    """
    # Default to no path filter
    if len(paths) < 1:
        path_list = [context.base_path]
    else:
        path_list = list(paths)
    if not context.is_init:
        if all_:
            click.echo(f"Running Bento archive on all tracked files...\n", err=True)
        else:
            click.echo(f"Running Bento archive on staged files...\n", err=True)
    if not context.config_path.exists():
        echo_error("No Bento configuration found. Please run `bento init`.")
        sys.exit(3)
    # Load the existing archive (if any) so existing entries can be kept and
    # counted separately from newly archived findings.
    if context.baseline_file_path.exists():
        with context.baseline_file_path.open() as json_file:
            old_baseline = bento.result.load_baseline(json_file)
        old_hashes = {
            h
            for findings in old_baseline.values()
            for h in findings.get(VIOLATIONS_KEY, {}).keys()
        }
    else:
        old_baseline = {}
        old_hashes = set()
    new_baseline: Dict[str, Dict[str, Dict[str, Any]]] = {}
    tools = context.tools.values()
    target_file_manager = TargetFileManager(
        context.base_path, path_list, not all_, context.ignore_file_path
    )
    target_paths = target_file_manager.get_target_files()
    all_findings, elapsed = bento.orchestrator.orchestrate(
        context, target_paths, not all_, tools
    )
    n_found = 0
    n_existing = 0
    found_hashes: Set[str] = set()
    for tool_id, vv in all_findings:
        if isinstance(vv, Exception):
            # A tool failure aborts the whole archive operation.
            raise vv
        # Remove filtered
        vv = [f for f in vv if not f.filtered]
        n_found += len(vv)
        new_baseline[tool_id] = bento.result.dump_results(vv)
        # Merge previously archived entries for this tool into the new
        # baseline so nothing already archived is lost.
        if tool_id in old_baseline:
            new_baseline[tool_id][VIOLATIONS_KEY].update(
                old_baseline[tool_id][VIOLATIONS_KEY]
            )
        for v in vv:
            h = v.syntactic_identifier_str()
            found_hashes.add(h)
            if h in old_hashes:
                n_existing += 1
    n_new = n_found - n_existing
    context.baseline_file_path.parent.mkdir(exist_ok=True, parents=True)
    with context.baseline_file_path.open("w") as json_file:
        bento.result.write_tool_results(json_file, new_baseline)
    finding_source_text = "in this project" if all_ else "due to staged changes"
    success_str = f"{n_new} finding(s) {finding_source_text} were archived, and will be hidden in future Bento runs."
    if n_existing > 0:
        success_str += f"\nBento also kept {n_existing} existing finding(s)."
    click.echo(success_str, err=True)
    if not context.is_init:
        echo_newline()
        echo_next_step("To view archived results", "cat .bento/archive.json")
def check(
    context: Context,
    formatter: Tuple[str, ...] = (),
    pager: bool = True,
    show_all: bool = False,
    staged_only: bool = False,
    tool: Optional[str] = None,
    paths: Optional[List[str]] = None,
) -> None:
    """
    Checks for new findings.

    Only findings not previously archived will be displayed (use --show-all to display archived findings).

    By default, 'bento check' will check the entire project. To run on one or more paths only, run:

    bento check path1 path2 ...
    """
    # Auto-configure a known-but-unconfigured tool before running it.
    if tool and tool not in context.configured_tools:
        click.echo(
            f"{tool} has not been configured. Adding default configuration for tool to .bento.yml"
        )
        update_tool_run(context, tool, False)
        # Set configured_tools to None so that future calls will
        # update and include newly added tool
        context._configured_tools = None
    if not context.config_path.exists():
        echo_error("No Bento configuration found. Please run `bento init`.")
        sys.exit(3)
    # Baseline = hashes of archived findings; empty when --show-all is used.
    if not show_all and context.baseline_file_path.exists():
        with context.baseline_file_path.open() as json_file:
            baseline = bento.result.yml_to_violation_hashes(json_file)
    else:
        baseline = {}
    config = context.config
    if formatter:
        config["formatter"] = [{f: {}} for f in formatter]
    fmts = context.formatters
    findings_to_log: List[Any] = []
    click.echo("Running Bento checks...\n", err=True)
    # Choose the execution context: a no-op by default, or a stash of
    # unstaged changes when running in legacy --staged_only mode.
    ctx = noop_context()
    if paths and len(paths) > 0:
        if staged_only:
            raise Exception("--staged_only should not be used with explicit paths")
    elif staged_only:
        ctx = staged_files_only(
            os.path.join(os.path.expanduser("~"), ".cache", "bento", "patches")
        )
        paths = get_staged_files()
    else:
        paths = None
    with ctx:
        before = time.time()
        runner = bento.tool_runner.Runner()
        tools: Iterable[Tool[Any]] = context.tools.values()
        if tool:
            tools = [context.configured_tools[tool]]
        all_results = runner.parallel_results(tools, baseline, paths)
        elapsed = time.time() - before
    # Progress bars terminate on whitespace
    echo_newline()
    is_error = False
    n_all = 0  # total findings, including archived ones
    n_all_filtered = 0  # findings that passed the archive filter
    filtered_findings: Dict[str, List[Violation]] = {}
    for tool_id, findings in all_results:
        if isinstance(findings, Exception):
            # Tool execution failed; report and force exit code 3 at the end.
            logging.error(findings)
            echo_error(f"Error while running {tool_id}: {findings}")
            if isinstance(findings, subprocess.CalledProcessError):
                click.secho(findings.stderr, err=True)
                click.secho(findings.stdout, err=True)
            if isinstance(findings, NodeError):
                echo_warning(
                    f"Node.js not found or version is not compatible with ESLint v6."
                )
            click.secho(
                f"""-------------------------------------------------------------------------------------------------
This may be due to a corrupted tool installation. You might be able to fix this issue by running:

bento init --clean

You can also view full details of this error in `{bento.constants.DEFAULT_LOG_PATH}`.
-------------------------------------------------------------------------------------------------
""",
                err=True,
            )
            is_error = True
        elif isinstance(findings, list) and findings:
            findings_to_log += bento.metrics.violations_to_metrics(
                tool_id,
                context.timestamp,
                findings,
                __get_ignores_for_tool(tool_id, config),
            )
            # Findings flagged `filtered` are archived; hide them from output.
            filtered = [f for f in findings if not f.filtered]
            filtered_findings[tool_id] = filtered
            n_all += len(findings)
            n_filtered = len(filtered)
            n_all_filtered += n_filtered
            logging.debug(f"{tool_id}: {n_filtered} findings passed filter")

    def post_metrics() -> None:
        # Fire-and-forget metrics upload on a background thread.
        bento.network.post_metrics(findings_to_log, is_finding=True)

    stats_thread = threading.Thread(name="stats", target=post_metrics)
    stats_thread.start()
    if n_all_filtered > 0:
        # Render findings through the formatters, paging if requested.
        dumped = [f.dump(filtered_findings) for f in fmts]
        context.start_user_timer()
        bento.util.less(dumped, pager=pager, overrun_pages=OVERRUN_PAGES)
        context.stop_user_timer()
        echo_warning(f"{n_all_filtered} finding(s) in {elapsed:.2f} s\n")
        if not context.is_init:
            echo_next_step("To suppress all findings", "bento archive")
    else:
        echo_success(f"0 findings in {elapsed:.2f} s\n")
    n_archived = n_all - n_all_filtered
    if n_archived > 0 and not show_all:
        echo_next_step(
            f"Not showing {n_archived} archived finding(s). To view",
            f"bento check {SHOW_ALL}",
        )
    # Exit-code protocol: 3 on tool error, 2 when findings remain, 0 otherwise.
    if is_error:
        sys.exit(3)
    elif n_all_filtered > 0:
        sys.exit(2)