Example #1
    def _abort_if_untracked_and_removed(self, removed: List[str]) -> None:
        """
            Raises UnsupportedGitStateException if any path is removed from
            the git index but also appears in the filesystem.

            :param removed (list): Removed paths
            :raises UnsupportedGitStateException: If any removed paths are present on Filesystem
        """
        untracked_removed = [
            r.replace(" ", r"\ ") for r in removed if Path(r).exists()
        ]
        if untracked_removed:
            joined = " ".join(untracked_removed)

            def echo_cmd(cmd: str) -> None:
                click.echo(f"    $ {click.style(cmd, bold=True)}\n", err=True)

            echo_error(
                "One or more files deleted from git exist on the filesystem. Aborting to prevent data loss. To "
                "continue, please stash by running the following two commands:"
            )
            echo_newline()
            echo_cmd(f"git stash -u -- {joined}")
            echo_cmd(f"git rm {joined}")
            click.secho(
                "Stashed changes can later be recovered by running:\n",
                err=True,
                fg=Colors.ERROR,
            )
            echo_cmd(f"git stash pop")
            raise UnsupportedGitStateException()
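
A minimal, self-contained sketch (not from the source) of the detection step above: given paths that git reports as removed, keep only those still present on disk and escape spaces so the suggested `git stash -u -- ...` command can be copy-pasted.

from pathlib import Path
from typing import List


def untracked_removed_paths(removed: List[str]) -> List[str]:
    # Keep removed paths that still exist on disk; escape spaces for the shell hint.
    return [r.replace(" ", r"\ ") for r in removed if Path(r).exists()]


# Example (assumes "still here.py" exists in the working tree and "gone.py" does not):
#     untracked_removed_paths(["gone.py", "still here.py"]) == ["still\\ here.py"]
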
Example #2
    def _update_email(self) -> None:
        """
        Updates the user's global config with their email address

        If a valid email was passed on the command line, or one is already stored
        in the global config, the interactive prompt is skipped.
        """
        # import inside def for performance
        from validate_email import validate_email

        if not self.email:
            self.email = self.global_config.get("email")

        if not self.email or not validate_email(self.email):
            content.UpdateEmail.leader.echo()

            email = None
            while not (email and validate_email(email)):
                self.context.start_user_timer()
                self._validate_interactivity()
                email = content.UpdateEmail.prompt.echo(
                    type=str, default=bento.git.user_email())
                self.context.stop_user_timer()
                echo_newline()

            if email != constants.QA_TEST_EMAIL_ADDRESS:
                r = self._post_email_to_mailchimp(email)
                if not r:
                    content.UpdateEmail.failure.echo()

            self.global_config["email"] = email
            persist_global_config(self.global_config)
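
A minimal sketch (not from the source) of the prompt-until-valid loop above, with the prompt and validator injected so it can run without click or the validate_email package:

from typing import Callable, Iterator, Optional


def prompt_until_valid(prompt: Callable[[], str],
                       is_valid: Callable[[str], bool]) -> str:
    # Mirrors the while loop above: keep asking until the validator accepts the answer.
    email: Optional[str] = None
    while not (email and is_valid(email)):
        email = prompt()
    return email


# Example: the first two answers are rejected, the third is accepted.
answers: Iterator[str] = iter(["", "not-an-email", "dev@example.com"])
assert prompt_until_valid(lambda: next(answers),
                          lambda e: "@" in e and "." in e) == "dev@example.com"
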
Example #3
    def _confirm_tos_update(self) -> bool:
        """
        Interactive process to confirm updated agreement to the Terms of Service

        :return: Whether the user has agreed to the current Terms of Service
        """
        if constants.TERMS_OF_SERVICE_KEY not in self.global_config:
            self.renderer.echo("confirm-tos", "fresh")
        else:
            # We care that the user has agreed to the current terms of service
            tos_version = self.global_config[constants.TERMS_OF_SERVICE_KEY]

            try:
                agreed_to_version = Version(tos_version)
                if agreed_to_version == Version(
                        constants.TERMS_OF_SERVICE_VERSION):
                    logging.info("User ToS agreement is current")
                    return True
            except InvalidVersion:
                self.renderer.echo("confirm-tos", "invalid-version")
                sys.exit(3)

            self.renderer.echo("confirm-tos", "upgrade")

        self.context.start_user_timer()
        self._validate_interactivity()
        agreed = click.confirm(
            "Continue and agree to Bento's terms of service and privacy policy?",
            default=True,
        )
        echo_newline()
        self.context.stop_user_timer()

        if agreed:
            self.global_config[
                constants.TERMS_OF_SERVICE_KEY
            ] = constants.TERMS_OF_SERVICE_VERSION

            persist_global_config(self.global_config)
            return True
        else:
            self.renderer.echo("confirm-tos", "error")
            return False
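
The version check above in isolation, as a minimal sketch. It assumes `Version` and `InvalidVersion` come from `packaging.version` (matching the names used in the method); the "0.3.0" default below is a placeholder, not the real `constants.TERMS_OF_SERVICE_VERSION`.

from packaging.version import InvalidVersion, Version


def tos_agreement_is_current(stored: str, current: str = "0.3.0") -> bool:
    # True when the stored agreement matches the current ToS version; an
    # unparsable stored value raises InvalidVersion, which the method above
    # treats as fatal (exit status 3).
    return Version(stored) == Version(current)


# Example: PEP 440 normalization makes "0.3" and "0.3.0" compare equal.
assert tos_agreement_is_current("0.3", "0.3.0") is True
try:
    tos_agreement_is_current("not-a-version")
except InvalidVersion:
    pass  # mirrors the "invalid-version" branch above
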
Example #4
    def _confirm_tos_update(self) -> bool:
        """
        Interactive process to confirm updated agreement to the Terms of Service

        :return: Whether the user has agreed to the current Terms of Service
        """
        if constants.TERMS_OF_SERVICE_KEY not in self.global_config:
            content.ConfirmTos.fresh.echo()
        else:
            # We care that the user has agreed to the current terms of service
            tos_version = self.global_config[constants.TERMS_OF_SERVICE_KEY]

            try:
                agreed_to_version = Version(tos_version)
                if agreed_to_version == Version(
                        constants.TERMS_OF_SERVICE_VERSION):
                    logging.info("User ToS agreement is current")
                    return True
            except InvalidVersion:
                content.ConfirmTos.invalid_version.echo()
                raise InvalidVersionException()

            content.ConfirmTos.upgrade.echo()

        self.context.start_user_timer()
        self._validate_interactivity()
        agreed = content.ConfirmTos.prompt.echo()
        echo_newline()
        self.context.stop_user_timer()

        if agreed:
            self.global_config[
                constants.TERMS_OF_SERVICE_KEY
            ] = constants.TERMS_OF_SERVICE_VERSION

            persist_global_config(self.global_config)
            return True
        else:
            content.ConfirmTos.error.echo()
            return False
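
This variant raises InvalidVersionException instead of calling sys.exit directly. A hypothetical caller sketch (the wrapper name, its `registrar` argument, and the exit codes are assumptions, not from the source; a stand-in exception class is defined so the sketch runs on its own):

import sys


class InvalidVersionException(Exception):
    # Stand-in for the project's exception of the same name.
    pass


def run_tos_confirmation(registrar) -> None:
    # Hypothetical wrapper: translate the helper's outcomes into process exit codes.
    try:
        if not registrar._confirm_tos_update():
            sys.exit(2)  # assumed code for "user declined the terms"
    except InvalidVersionException:
        sys.exit(3)  # status the older variant used when the stored version was unparsable
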
Example #5
    def _update_email(self) -> None:
        """
        Updates the user's global config with their email address

        If a valid email was passed on the command line, or one is already stored
        in the global config, the interactive prompt is skipped.
        """
        # import inside def for performance
        from validate_email import validate_email

        valid_configured_email = False
        if "email" in self.global_config:
            configured_email = self.global_config.get("email")
            if configured_email is not None:
                valid_configured_email = validate_email(configured_email)

        if (not self.email or
                not validate_email(self.email)) and not valid_configured_email:
            self.renderer.echo("update-email", "leader")

            email = None
            while not (email and validate_email(email)):
                self.context.start_user_timer()
                self._validate_interactivity()
                email = click.prompt(
                    self.renderer.text_at("update-email", "prompt"),
                    type=str,
                    default=bento.git.user_email(),
                )
                self.context.stop_user_timer()
                echo_newline()

            r = self._post_email_to_mailchimp(email)
            if not r:
                self.renderer.echo("update-email", "failure")

            self.global_config["email"] = email
            persist_global_config(self.global_config)
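
The gating logic above, extracted into a pure function as a minimal sketch (not from the source), so the skip conditions are easy to see: the prompt only runs when neither the command line nor the stored config provides a valid address.

from typing import Callable, Optional


def needs_email_prompt(cli_email: Optional[str],
                       configured_email: Optional[str],
                       is_valid: Callable[[str], bool]) -> bool:
    # True only when neither source supplies a valid email, matching the
    # `(not self.email or not validate_email(...)) and not valid_configured_email` test.
    valid_cli = bool(cli_email) and is_valid(cli_email)
    valid_configured = configured_email is not None and is_valid(configured_email)
    return not valid_cli and not valid_configured


# Example: a valid configured email suppresses the prompt even without a CLI flag.
assert needs_email_prompt(None, "dev@example.com", lambda e: "@" in e) is False
assert needs_email_prompt(None, None, lambda e: "@" in e) is True
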
Example #6
def archive(context: Context, all_: bool, paths: Tuple[Path, ...]) -> None:
    """
    Suppress current findings.

    By default, only results introduced by currently staged changes will be
    added to the archive (`.bento/archive.json`). Archived findings will
    not appear in future `bento check` output and will not block commits if
    `autorun` is enabled.

    Use `--all` to archive findings in all Git tracked files, not just those
    that are staged:

        $ bento archive --all [PATHS]

    Optional PATHS can be specified to archive results from specific directories
    or files.

    Archived findings are viewable in `.bento/archive.json`.
    """
    # Default to no path filter
    if len(paths) < 1:
        path_list = [context.base_path]
    else:
        path_list = list(paths)

    if not context.is_init:
        if all_:
            click.echo(f"Running Bento archive on all tracked files...\n", err=True)
        else:
            click.echo(f"Running Bento archive on staged files...\n", err=True)

    if not context.config_path.exists():
        echo_error("No Bento configuration found. Please run `bento init`.")
        sys.exit(3)

    if context.baseline_file_path.exists():
        with context.baseline_file_path.open() as json_file:
            old_baseline = bento.result.load_baseline(json_file)
            old_hashes = {
                h
                for findings in old_baseline.values()
                for h in findings.get(VIOLATIONS_KEY, {}).keys()
            }
    else:
        old_baseline = {}
        old_hashes = set()

    new_baseline: Dict[str, Dict[str, Dict[str, Any]]] = {}
    tools = context.tools.values()

    target_file_manager = TargetFileManager(
        context.base_path, path_list, not all_, context.ignore_file_path
    )
    target_paths = target_file_manager.get_target_files()
    all_findings, elapsed = bento.orchestrator.orchestrate(
        context, target_paths, not all_, tools
    )

    n_found = 0
    n_existing = 0
    found_hashes: Set[str] = set()

    for tool_id, vv in all_findings:
        if isinstance(vv, Exception):
            raise vv
        # Remove filtered
        vv = [f for f in vv if not f.filtered]
        n_found += len(vv)
        new_baseline[tool_id] = bento.result.dump_results(vv)
        if tool_id in old_baseline:
            new_baseline[tool_id][VIOLATIONS_KEY].update(
                old_baseline[tool_id][VIOLATIONS_KEY]
            )
        for v in vv:
            h = v.syntactic_identifier_str()
            found_hashes.add(h)
            if h in old_hashes:
                n_existing += 1

    n_new = n_found - n_existing

    context.baseline_file_path.parent.mkdir(exist_ok=True, parents=True)
    with context.baseline_file_path.open("w") as json_file:
        bento.result.write_tool_results(json_file, new_baseline)

    finding_source_text = "in this project" if all_ else "due to staged changes"
    success_str = f"{n_new} finding(s) {finding_source_text} were archived, and will be hidden in future Bento runs."
    if n_existing > 0:
        success_str += f"\nBento also kept {n_existing} existing finding(s)."

    click.echo(success_str, err=True)

    if not context.is_init:
        echo_newline()
        echo_next_step("To view archived results", "cat .bento/archive.json")
Example #7
def archive(context: Context, show_bars: bool = True) -> None:
    """
    Adds all current findings to the whitelist.
    """
    if not context.is_init:
        click.secho("Running Bento archive...\n" "", err=True)

    if not context.config_path.exists():
        echo_error("No Bento configuration found. Please run `bento init`.")
        sys.exit(3)

    if context.baseline_file_path.exists():
        with context.baseline_file_path.open() as json_file:
            old_baseline = bento.result.yml_to_violation_hashes(json_file)
            old_hashes = {h for hh in old_baseline.values() for h in hh}
    else:
        old_hashes = set()

    new_baseline: List[str] = []
    tools = context.tools.values()

    all_findings = bento.tool_runner.Runner(
        show_bars=show_bars).parallel_results(tools, {}, None)
    n_found = 0
    n_existing = 0
    found_hashes: Set[str] = set()
    if show_bars:
        echo_newline()

    for tool_id, vv in all_findings:
        if isinstance(vv, Exception):
            raise vv
        n_found += len(vv)
        new_baseline += bento.result.tool_results_to_yml(tool_id, vv)
        for v in vv:
            h = v.syntactic_identifier_str()
            found_hashes.add(h)
            if h in old_hashes:
                n_existing += 1

    n_new = n_found - n_existing
    n_removed = len(old_hashes - found_hashes)

    context.baseline_file_path.parent.mkdir(exist_ok=True, parents=True)
    with context.baseline_file_path.open("w") as json_file:
        json_file.writelines(new_baseline)

    success_str = click.style(f"Project analyzed with {len(tools)} tool(s).",
                              bold=True)
    success_str += (
        f"\n{n_new} finding(s) were archived, and will be hidden in future Bento runs."
    )
    if n_existing > 0:
        success_str += f"\nBento also kept {n_existing} existing finding(s)"
        if n_removed > 0:
            success_str += f" and removed {n_removed} fixed finding(s)."
        else:
            success_str += "."
    elif n_removed > 0:
        success_str += f"\nBento also removed {n_removed} fixed finding(s)."

    click.echo(success_str, err=True)

    if not context.is_init:
        echo_newline()
        echo_next_step("To view archived results", "bento check --show-all")
        click.echo(
            f"\nPlease check '{context.pretty_path(context.baseline_file_path)}' in to source control.",
            err=True,
        )
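
The extra bookkeeping in this older variant, as a minimal sketch: hashes that were in the previous archive but are no longer reported count as fixed findings and are dropped from the baseline.

# Hashes from the previous archive that no longer show up are treated as fixed.
old_hashes = {"a", "b", "c"}
found_hashes = {"b", "d"}
n_removed = len(old_hashes - found_hashes)  # {"a", "c"} -> two fixed findings
assert n_removed == 2
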
Example #8
def check(
    context: Context,
    formatter: Tuple[str, ...] = (),
    pager: bool = True,
    show_all: bool = False,
    staged_only: bool = False,
    tool: Optional[str] = None,
    paths: Optional[List[str]] = None,
) -> None:
    """
    Checks for new findings.

    Only findings not previously archived will be displayed (use --show-all
    to display archived findings).

    By default, 'bento check' will check the entire project. To run
    on one or more paths only, run:

      bento check path1 path2 ...
    """
    if tool and tool not in context.configured_tools:
        click.echo(
            f"{tool} has not been configured. Adding default configuration for tool to .bento.yml"
        )
        update_tool_run(context, tool, False)
        # Set configured_tools to None so that future calls will
        # update and include newly added tool
        context._configured_tools = None

    if not context.config_path.exists():
        echo_error("No Bento configuration found. Please run `bento init`.")
        sys.exit(3)

    if not show_all and context.baseline_file_path.exists():
        with context.baseline_file_path.open() as json_file:
            baseline = bento.result.yml_to_violation_hashes(json_file)
    else:
        baseline = {}

    config = context.config
    if formatter:
        config["formatter"] = [{f: {}} for f in formatter]
    fmts = context.formatters
    findings_to_log: List[Any] = []

    click.echo("Running Bento checks...\n", err=True)

    ctx = noop_context()
    if paths and len(paths) > 0:
        if staged_only:
            raise Exception(
                "--staged_only should not be used with explicit paths")
    elif staged_only:
        ctx = staged_files_only(
            os.path.join(os.path.expanduser("~"), ".cache", "bento",
                         "patches"))
        paths = get_staged_files()
    else:
        paths = None

    with ctx:
        before = time.time()
        runner = bento.tool_runner.Runner()
        tools: Iterable[Tool[Any]] = context.tools.values()

        if tool:
            tools = [context.configured_tools[tool]]

        all_results = runner.parallel_results(tools, baseline, paths)
        elapsed = time.time() - before

    # Progress bars terminate on whitespace
    echo_newline()

    is_error = False

    n_all = 0
    n_all_filtered = 0
    filtered_findings: Dict[str, List[Violation]] = {}
    for tool_id, findings in all_results:
        if isinstance(findings, Exception):
            logging.error(findings)
            echo_error(f"Error while running {tool_id}: {findings}")
            if isinstance(findings, subprocess.CalledProcessError):
                click.secho(findings.stderr, err=True)
                click.secho(findings.stdout, err=True)
            if isinstance(findings, NodeError):
                echo_warning(
                    "Node.js not found or version is not compatible with ESLint v6."
                )

            click.secho(
                f"""-------------------------------------------------------------------------------------------------
This may be due to a corrupted tool installation. You might be able to fix this issue by running:

  bento init --clean

You can also view full details of this error in `{bento.constants.DEFAULT_LOG_PATH}`.
-------------------------------------------------------------------------------------------------
""",
                err=True,
            )
            is_error = True
        elif isinstance(findings, list) and findings:
            findings_to_log += bento.metrics.violations_to_metrics(
                tool_id,
                context.timestamp,
                findings,
                __get_ignores_for_tool(tool_id, config),
            )
            filtered = [f for f in findings if not f.filtered]
            filtered_findings[tool_id] = filtered

            n_all += len(findings)
            n_filtered = len(filtered)
            n_all_filtered += n_filtered
            logging.debug(f"{tool_id}: {n_filtered} findings passed filter")

    def post_metrics() -> None:
        bento.network.post_metrics(findings_to_log, is_finding=True)

    stats_thread = threading.Thread(name="stats", target=post_metrics)
    stats_thread.start()

    if n_all_filtered > 0:
        dumped = [f.dump(filtered_findings) for f in fmts]
        context.start_user_timer()
        bento.util.less(dumped, pager=pager, overrun_pages=OVERRUN_PAGES)
        context.stop_user_timer()

        echo_warning(f"{n_all_filtered} finding(s) in {elapsed:.2f} s\n")
        if not context.is_init:
            echo_next_step("To suppress all findings", "bento archive")
    else:
        echo_success(f"0 findings in {elapsed:.2f} s\n")

    n_archived = n_all - n_all_filtered
    if n_archived > 0 and not show_all:
        echo_next_step(
            f"Not showing {n_archived} archived finding(s). To view",
            f"bento check {SHOW_ALL}",
        )

    if is_error:
        sys.exit(3)
    elif n_all_filtered > 0:
        sys.exit(2)
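
A minimal sketch (not from the source) of how a wrapper or CI step could interpret the exit codes set above: 0 for a clean run, 2 when unarchived findings remain, 3 for configuration or tool errors. It assumes the `bento` CLI is on PATH.

import subprocess


def run_bento_check() -> str:
    # Interpret `bento check` exit codes as used in the command above.
    proc = subprocess.run(["bento", "check"])
    if proc.returncode == 0:
        return "clean"
    if proc.returncode == 2:
        return "findings"  # new, unarchived findings were reported
    if proc.returncode == 3:
        return "error"  # missing configuration or a tool failed
    return "unknown"
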