Example #1
def main(argv=None):
    if len(sys.argv) == 1:  # pragma: no cover
        sys.argv.append('-h')

    args = parse_args(argv)
    if args.verbose:  # pragma: no cover
        CustomLog.enableDebug(args.verbose)

    plugins = initialize_plugins(args.plugins)

    if args.scan:
        if args.exclude:
            args.exclude = args.exclude[0]

        print(
            json.dumps(
                baseline.initialize(
                    plugins,
                    args.exclude,
                    args.scan,
                ).format_for_baseline_output(),
                indent=2,
                sort_keys=True,
            ),
        )

    return 0
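
A minimal usage sketch for this entry point; the --scan flag and wiring are assumptions inferred from the args attributes above, not confirmed by this excerpt.

# Hypothetical driver (assumes parse_args() accepts a --scan path argument,
# as args.scan above suggests). A shell equivalent might be:
#   detect-secrets --scan . > .secrets.baseline
import sys

if __name__ == '__main__':
    sys.exit(main(['--scan', '.']))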
Example #2
def main(argv=None):
    args = parse_args(argv)
    if args.verbose:  # pragma: no cover
        CustomLog.enableDebug(args.verbose)

    try:
        # If baseline is provided, we first want to make sure
        # it's valid, before doing any further computation.
        baseline_collection = get_baseline(args.baseline[0])
    except (IOError, ValueError):
        # Error logs handled in load_baseline_from_file logic.
        return 1

    results = find_secrets_in_files(args)
    if baseline_collection:
        original_results = results
        results = get_secrets_not_in_baseline(
            results,
            baseline_collection,
        )

    if len(results.data) > 0:
        pretty_print_diagnostics(results)
        return 1

    if not baseline_collection:
        return 0

    # Only attempt baseline modifications if we don't find any new secrets
    successful_update = update_baseline_with_removed_secrets(
        original_results,
        baseline_collection,
        args.filenames,
    )
    if successful_update:
        with open(args.baseline[0], 'w') as f:
            f.write(
                json.dumps(
                    baseline_collection.format_for_baseline_output(),
                    indent=2,
                )
            )

        # The pre-commit framework should automatically detect a file change
        # and print a relevant error message.
        return 1

    return 0
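
A hedged sketch of how this hook might be driven by the pre-commit framework; the flag spelling is an assumption based on args.baseline and args.filenames above.

# Illustrative pre-commit wiring (assumed, not part of the source): a
# non-zero return blocks the commit, matching the comments above.
import sys

if __name__ == '__main__':
    sys.exit(main(['--baseline', '.secrets.baseline'] + sys.argv[1:]))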
Example #3
def raise_exception_if_baseline_file_is_not_up_to_date(filename):
    """We want to make sure that if there are changes to the baseline
    file, they will be included in the commit. This way, we can keep
    our baselines up-to-date.

    :raises: ValueError
    """
    try:
        files_changed_but_not_staged = subprocess.check_output(
            'git diff --name-only'.split()).split()
    except subprocess.CalledProcessError:
        # Since we don't pipe stderr, we get free logging through git.
        raise ValueError

    if filename.encode() in files_changed_but_not_staged:
        CustomLog(formatter='%(message)s').getLogger()\
            .error((
                'Your baseline file ({}) is unstaged.\n'
                '`git add {}` to fix this.'
            ).format(
                filename,
                filename,
            ))

        raise ValueError
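
Because the function signals both failure modes with a bare ValueError, a caller maps it to a shell exit code. A minimal sketch, assuming it runs inside a hook's entry point:

# Caller sketch (assumed): the function already logs its own error
# message, so the except block only needs to set the exit status.
import sys

try:
    raise_exception_if_baseline_file_is_not_up_to_date('.secrets.baseline')
except ValueError:
    sys.exit(1)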
Example #4
def pretty_print_diagnostics(secrets):
    """Prints a helpful error message, for better usability.

    :type secrets: SecretsCollection
    """
    log = CustomLog(formatter='%(message)s').getLogger()

    _print_warning_header(log)
    _print_secrets_found(log, secrets)
    _print_mitigation_suggestions(log)
Example #5
from __future__ import absolute_import

import os
import subprocess

from detect_secrets.core.log import CustomLog

from detect_secrets_server.repos.base_tracked_repo import BaseTrackedRepo


CustomLogObj = CustomLog()


class LocalTrackedRepo(BaseTrackedRepo):

    def cron(self):
        return "%s %s" % (super(LocalTrackedRepo, self).cron(), '--local')

    @property
    def repo_location(self):
        # When we're performing git commands on a local repository, we need to reference
        # the `/.git` folder within the cloned git repo.
        return os.path.join(self.repo, '.git')

    def clone_and_pull_repo(self):
        # Assumption: If you are scanning a local git repo, then you are "actively"
        # working on it. Therefore, this module will not bear the responsibility
        # of auto-updating the repo with `git pull`.
        pass

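A standalone illustration of the repo_location logic above: local scans target the .git directory inside an existing working copy (the path is illustrative).

import os

repo = '/home/user/projects/my-repo'  # hypothetical local clone
print(os.path.join(repo, '.git'))     # -> /home/user/projects/my-repo/.git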
Example #6
def _get_custom_log():
    return CustomLog(formatter='%(message)s').getLogger()
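
This helper standardizes the bare-message logger constructed inline in Examples 3 and 4; a trivial usage sketch:

# The '%(message)s' formatter keeps output free of timestamp/level
# prefixes, which suits the user-facing messages above.
log = _get_custom_log()
log.error('Your baseline file is unstaged.')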
Example #7
def main(argv=None):
    """
    Expected Usage:
      1. Initialize TrackedRepos from config.yaml, and save to crontab.
      2. Each cron command will run and scan git diff from previous commit saved, to now.
      3. If something is found, alert.

    :return: shell error code
    """
    if len(sys.argv) == 1:  # pragma: no cover
        sys.argv.append('-h')

    args = parse_args(argv)
    if args.verbose:  # pragma: no cover
        CustomLog.enableDebug(args.verbose)

    plugin_sensitivity = parse_sensitivity_values(args)
    repo_config = parse_repo_config(args)
    s3_config = parse_s3_config(args)

    if args.initialize:
        # initialize sets up the local file storage for tracking
        try:
            tracked_repos = initialize_repos_from_repo_yaml(
                args.initialize,
                plugin_sensitivity,
                repo_config,
                s3_config,
            )
        except IOError:
            # Error handled in initialize_repos_from_repo_yaml
            return 1

        cron_repos = [repo for repo in tracked_repos if repo.save()]
        if not cron_repos:
            return 0

        print('# detect-secrets scanner')
        for repo in cron_repos:
            print(
                '{} {}'.format(
                    repo.cron(),
                    args.output_hook_command,
                )
            )

    elif args.add_repo:
        add_repo(
            args.add_repo[0],
            plugin_sensitivity,
            is_local_repo=args.local,
            s3_config=s3_config,
            repo_config=repo_config,
        )

    elif args.scan_repo:
        log = CustomLogObj.getLogger()

        repo_name = args.scan_repo[0]
        repo = tracked_repo_factory(args.local, bool(s3_config)) \
            .load_from_file(repo_name, repo_config, s3_config)
        if not repo:
            return 1

        secrets = repo.scan()

        # scan() returns None on failure; an empty collection means a clean scan.
        if secrets is None:
            return 1

        if len(secrets.data) > 0:
            log.error('SCAN COMPLETE - We found secrets in: %s', repo.name)

            secrets = secrets.json()
            set_authors_for_found_secrets(secrets, repo)

            alert = {
                'alert': 'Secrets found',
                'repo_name': repo.name,
                'secrets': secrets,
            }
            log.error(alert)
            args.output_hook.alert(repo.name, secrets)
        else:
            log.info('SCAN COMPLETE - STATUS: clean for %s', repo.name)

            # Save records, since the latest scan indicates that the most recent commit is clean
            repo.update()
            repo.save(OverrideLevel.ALWAYS)

    return 0
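
A hedged sketch of the three-step flow the docstring describes; the flag spellings are assumptions inferred from args.initialize, args.scan_repo, and args.output_hook_command above.

# Illustrative lifecycle (assumed flags, not confirmed by this excerpt):
#   1. Seed local tracking state and emit crontab lines:
#        main(['--initialize', 'repos.yaml'])
#   2. Each emitted cron entry later re-invokes the scanner per repo:
#        main(['--scan-repo', 'github.com/org/repo'])
import sys

if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]))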