def _repo_ref(tmpdir, repo, ref): # if `ref` is explicitly passed, use it if ref: return repo, ref ref = git.head_rev(repo) # if it exists on disk, we'll try and clone it with the local changes if os.path.exists(repo) and git.has_diff('HEAD', repo=repo): logger.warning('Creating temporary repo with uncommitted changes...') shadow = os.path.join(tmpdir, 'shadow-repo') cmd_output_b('git', 'clone', repo, shadow) cmd_output_b('git', 'checkout', ref, '-b', '_pc_tmp', cwd=shadow) idx = git.git_path('index', repo=shadow) objs = git.git_path('objects', repo=shadow) env = dict(os.environ, GIT_INDEX_FILE=idx, GIT_OBJECT_DIRECTORY=objs) staged_files = git.get_staged_files(cwd=repo) if staged_files: xargs(('git', 'add', '--'), staged_files, cwd=repo, env=env) cmd_output_b('git', 'add', '-u', cwd=repo, env=env) git.commit(repo=shadow) return shadow, git.head_rev(shadow) else: return repo, ref
def _repo_ref(tmpdir: str, repo: str, ref: Optional[str]) -> Tuple[str, str]: # if `ref` is explicitly passed, use it if ref is not None: return repo, ref ref = git.head_rev(repo) # if it exists on disk, we'll try and clone it with the local changes if os.path.exists(repo) and git.has_diff("HEAD", repo=repo): logger.warning("Creating temporary repo with uncommitted changes...") shadow = os.path.join(tmpdir, "shadow-repo") cmd_output_b("git", "clone", repo, shadow) cmd_output_b("git", "checkout", ref, "-b", "_pc_tmp", cwd=shadow) idx = git.git_path("index", repo=shadow) objs = git.git_path("objects", repo=shadow) env = dict(os.environ, GIT_INDEX_FILE=idx, GIT_OBJECT_DIRECTORY=objs) staged_files = git.get_staged_files(cwd=repo) if staged_files: xargs(("git", "add", "--"), staged_files, cwd=repo, env=env) cmd_output_b("git", "add", "-u", cwd=repo, env=env) git.commit(repo=shadow) return shadow, git.head_rev(shadow) else: return repo, ref
def _repo_ref(tmpdir, repo, ref): # if `ref` is explicitly passed, use it if ref: return repo, ref ref = git.head_rev(repo) # if it exists on disk, we'll try and clone it with the local changes if os.path.exists(repo) and git.has_diff('HEAD', repo=repo): logger.warning('Creating temporary repo with uncommitted changes...') shadow = os.path.join(tmpdir, 'shadow-repo') cmd_output('git', 'clone', repo, shadow) cmd_output('git', 'checkout', ref, '-b', '_pc_tmp', cwd=shadow) idx = git.git_path('index', repo=shadow) objs = git.git_path('objects', repo=shadow) env = dict(os.environ, GIT_INDEX_FILE=idx, GIT_OBJECT_DIRECTORY=objs) staged_files = git.get_staged_files(cwd=repo) if staged_files: xargs(('git', 'add', '--'), staged_files, cwd=repo, env=env) cmd_output('git', 'add', '-u', cwd=repo, env=env) git.commit(repo=shadow) return shadow, git.head_rev(shadow) else: return repo, ref
def test_get_staged_files_deleted(tempdir_factory):
    """Files removed from the index are not reported as staged."""
    repo_path = git_dir(tempdir_factory)
    with cwd(repo_path):
        # create an empty file, commit it, then un-stage it again
        with open('test', 'a'):
            pass
        for git_args in (
            ('add', 'test'),
            ('commit', '-m', 'foo', '--allow-empty'),
            ('rm', '--cached', 'test'),
        ):
            cmd_output('git', *git_args)
        assert git.get_staged_files() == []
def test_get_staged_files_deleted(tempdir_factory):
    """A committed file dropped from the index must not appear staged."""
    path = git_dir(tempdir_factory)
    with cwd(path):
        # touch an empty file named 'test'
        handle = open('test', 'a')
        handle.close()

        def run_git(*args):
            # one-liner wrapper so each git step reads cleanly
            cmd_output('git', *args)

        run_git('add', 'test')
        run_git('commit', '-m', 'foo', '--allow-empty')
        # remove from the index only; the working-tree copy remains
        run_git('rm', '--cached', 'test')
        assert [] == git.get_staged_files()
def _all_filenames(args: argparse.Namespace) -> Collection[str]: if args.origin and args.source: return git.get_changed_files(args.origin, args.source) elif args.hook_stage in {'prepare-commit-msg', 'commit-msg'}: return (args.commit_msg_filename, ) elif args.files: return args.files elif args.all_files: return git.get_all_files() elif git.is_in_merge_conflict(): return git.get_conflicted_files() else: return git.get_staged_files()
def _all_filenames(args): if args.origin and args.source: return git.get_changed_files(args.origin, args.source) elif args.hook_stage == 'commit-msg': return (args.commit_msg_filename, ) elif args.files: return args.files elif args.all_files: return git.get_all_files() elif git.is_in_merge_conflict(): return git.get_conflicted_files() else: return git.get_staged_files()
def _all_filenames(args): if args.origin and args.source: return git.get_changed_files(args.origin, args.source) elif args.hook_stage in {'prepare-commit-msg', 'commit-msg'}: return (args.commit_msg_filename,) elif args.files: return args.files elif args.all_files: return git.get_all_files() elif git.is_in_merge_conflict(): return git.get_conflicted_files() else: return git.get_staged_files()
def _all_filenames(args: argparse.Namespace) -> Collection[str]: if args.hook_stage == 'post-checkout': # no files for post-checkout return () elif args.hook_stage in {'prepare-commit-msg', 'commit-msg'}: return (args.commit_msg_filename, ) elif args.from_ref and args.to_ref: return git.get_changed_files(args.from_ref, args.to_ref) elif args.files: return args.files elif args.all_files: return git.get_all_files() elif git.is_in_merge_conflict(): return git.get_conflicted_files() else: return git.get_staged_files()
def _all_filenames(args: argparse.Namespace) -> Collection[str]: # these hooks do not operate on files if args.hook_stage in {"post-checkout", "post-commit"}: return () elif args.hook_stage in {"prepare-commit-msg", "commit-msg"}: return (args.commit_msg_filename,) elif args.from_ref and args.to_ref: return git.get_changed_files(args.from_ref, args.to_ref) elif args.files: return args.files elif args.all_files: return git.get_all_files() elif git.is_in_merge_conflict(): return git.get_conflicted_files() else: return git.get_staged_files()
def test_get_staged_files_deleted(in_git_dir):
    """Index-deleted files must not be reported by get_staged_files."""
    # create, stage and commit an empty file
    in_git_dir.join('test').ensure()
    cmd_output('git', 'add', 'test')
    git_commit()
    # then drop it from the index again
    cmd_output('git', 'rm', '--cached', 'test')
    staged = git.get_staged_files()
    assert staged == []
def test_staged_files_non_ascii(non_ascii_repo):
    """Staged filenames with non-ASCII characters come back decoded."""
    filename = 'интервью'
    non_ascii_repo.join(filename).write('hi')
    cmd_output('git', 'add', '.')
    assert git.get_staged_files() == [filename]
def check(
    context: Context,
    formatter: Tuple[str, ...] = (),
    pager: bool = True,
    show_all: bool = False,
    staged_only: bool = False,
    tool: Optional[str] = None,
    paths: Optional[List[str]] = None,
) -> None:
    """
    Checks for new findings.

    Only findings not previously archived will be displayed (use
    --show-all to display archived findings).

    By default, 'bento check' will check the entire project. To run
    on one or more paths only, run:

        bento check path1 path2 ...

    Exits with code 3 on configuration/tool errors and 2 when findings
    remain after filtering.
    """
    # A requested tool that was never configured gets a default config
    # added on the fly.
    if tool and tool not in context.configured_tools:
        click.echo(
            f"{tool} has not been configured. Adding default configuration for tool to .bento.yml"
        )
        update_tool_run(context, tool, False)
        # Set configured_tools to None so that future calls will
        # update and include newly added tool
        context._configured_tools = None

    if not context.config_path.exists():
        echo_error("No Bento configuration found. Please run `bento init`.")
        sys.exit(3)

    # Load previously archived finding hashes so they can be filtered
    # out, unless the caller asked for everything.
    if not show_all and context.baseline_file_path.exists():
        with context.baseline_file_path.open() as json_file:
            baseline = bento.result.yml_to_violation_hashes(json_file)
    else:
        baseline = {}

    config = context.config

    # An explicit --formatter flag overrides the formatter list in config.
    if formatter:
        config["formatter"] = [{f: {}} for f in formatter]
    fmts = context.formatters
    findings_to_log: List[Any] = []

    click.echo("Running Bento checks...\n", err=True)

    # Decide which paths to check.  --staged-only swaps in a context
    # manager that temporarily stashes unstaged changes; explicit paths
    # and --staged-only are mutually exclusive.
    ctx = noop_context()
    if paths and len(paths) > 0:
        if staged_only:
            raise Exception("--staged_only should not be used with explicit paths")
    elif staged_only:
        ctx = staged_files_only(
            os.path.join(os.path.expanduser("~"), ".cache", "bento", "patches")
        )
        paths = get_staged_files()
    else:
        # presumably None means "whole project" to the runner — TODO confirm
        paths = None

    with ctx:
        before = time.time()
        runner = bento.tool_runner.Runner()
        tools: Iterable[Tool[Any]] = context.tools.values()
        if tool:
            # restrict the run to the single requested tool
            tools = [context.configured_tools[tool]]
        all_results = runner.parallel_results(tools, baseline, paths)
        elapsed = time.time() - before

    # Progress bars terminate on whitespace
    echo_newline()

    is_error = False

    n_all = 0
    n_all_filtered = 0
    filtered_findings: Dict[str, List[Violation]] = {}
    for tool_id, findings in all_results:
        if isinstance(findings, Exception):
            # A tool crashed: surface its output and mark the run failed.
            logging.error(findings)
            echo_error(f"Error while running {tool_id}: {findings}")
            if isinstance(findings, subprocess.CalledProcessError):
                click.secho(findings.stderr, err=True)
                click.secho(findings.stdout, err=True)
            if isinstance(findings, NodeError):
                echo_warning(
                    f"Node.js not found or version is not compatible with ESLint v6."
                )
            click.secho(
                f"""-------------------------------------------------------------------------------------------------
This may be due to a corrupted tool installation. You might be able to fix this issue by running:

  bento init --clean

You can also view full details of this error in `{bento.constants.DEFAULT_LOG_PATH}`.
-------------------------------------------------------------------------------------------------
""",
                err=True,
            )
            is_error = True
        elif isinstance(findings, list) and findings:
            findings_to_log += bento.metrics.violations_to_metrics(
                tool_id,
                context.timestamp,
                findings,
                __get_ignores_for_tool(tool_id, config),
            )
            # Keep only findings that passed the baseline/ignore filter.
            filtered = [f for f in findings if not f.filtered]
            filtered_findings[tool_id] = filtered
            n_all += len(findings)
            n_filtered = len(filtered)
            n_all_filtered += n_filtered
            logging.debug(f"{tool_id}: {n_filtered} findings passed filter")

    # Post usage metrics in the background so the user is not kept waiting.
    def post_metrics() -> None:
        bento.network.post_metrics(findings_to_log, is_finding=True)

    stats_thread = threading.Thread(name="stats", target=post_metrics)
    stats_thread.start()

    if n_all_filtered > 0:
        dumped = [f.dump(filtered_findings) for f in fmts]
        # Pause user-time accounting while the pager is open.
        context.start_user_timer()
        bento.util.less(dumped, pager=pager, overrun_pages=OVERRUN_PAGES)
        context.stop_user_timer()

        echo_warning(f"{n_all_filtered} finding(s) in {elapsed:.2f} s\n")
        if not context.is_init:
            echo_next_step("To suppress all findings", "bento archive")
    else:
        echo_success(f"0 findings in {elapsed:.2f} s\n")

    n_archived = n_all - n_all_filtered
    if n_archived > 0 and not show_all:
        echo_next_step(
            f"Not showing {n_archived} archived finding(s). To view",
            f"bento check {SHOW_ALL}",
        )

    # Exit codes: 3 = a tool errored, 2 = findings remain, 0 = clean.
    if is_error:
        sys.exit(3)
    elif n_all_filtered > 0:
        sys.exit(2)
def test_get_staged_files_deleted(in_git_dir):
    """After `git rm --cached`, a committed file is no longer staged."""
    tracked = in_git_dir.join('test')
    tracked.ensure()
    cmd_output('git', 'add', 'test')
    git_commit()
    # remove from the index only; the working-tree copy stays behind
    cmd_output('git', 'rm', '--cached', 'test')
    assert [] == git.get_staged_files()
def test_staged_files_non_ascii(non_ascii_repo):
    """get_staged_files must handle UTF-8 (Cyrillic) filenames."""
    # write a file whose name is non-ASCII, then stage everything
    non_ascii_repo.join('интервью').write('hi')
    cmd_output('git', 'add', '.')
    staged = git.get_staged_files()
    assert staged == ['интервью']