def main(argv=None):
    """Command-line entry point.

    Dispatches on the parsed ``action`` and returns a shell exit code
    (0 on success; ``scan`` propagates its own code).
    """
    if argv is None:  # pragma: no cover
        argv = sys.argv[1:]
    if not argv:  # pragma: no cover
        # No arguments at all: show help instead of failing.
        argv.append('-h')

    args = parse_args(argv)
    if args.verbose:  # pragma: no cover
        log.set_debug_level(args.verbose)

    action = args.action
    if action == 'add':
        # `add --config` (re)initializes tracking from a config file;
        # a plain `add` tracks a single repository.
        if getattr(args, 'config', False):
            actions.initialize(args)
        else:
            actions.add_repo(args)
    elif action == 'install':
        actions.install_mapper(args)
    elif action == 'list':
        actions.display_tracked_repositories(args)
    elif action == 'scan':
        return actions.scan_repo(args)

    return 0
def main(argv=None):
    """Command-line entry point: run a scan or audit a baseline.

    :param argv: argument list; defaults to ``sys.argv[1:]``.
    :return: shell exit code (always 0 on completion).

    Fixes over the previous version: the empty-argument check now uses
    the local ``argv`` list (it previously inspected and mutated the
    global ``sys.argv``, silently ignoring an explicitly passed
    ``argv``), and the ``--version`` branch returns 0 instead of None
    so the function always yields an int exit code.
    """
    if argv is None:  # pragma: no cover
        argv = sys.argv[1:]
    if not argv:  # pragma: no cover
        # No arguments: show help instead of doing nothing.
        argv.append('-h')

    args = parse_args(argv)
    if args.verbose:  # pragma: no cover
        log.set_debug_level(args.verbose)

    if args.version:  # pragma: no cover
        print(VERSION)
        return 0

    if args.scan:
        print(
            json.dumps(
                _perform_scan(args),
                indent=2,
                sort_keys=True,
            ),
        )
    elif args.audit:
        audit.audit_baseline(args.audit[0])

    return 0
def main(argv=None):
    """Command-line entry point for scanning and auditing baselines.

    :param argv: argument list; defaults to ``sys.argv[1:]``.
    :return: shell exit code (0 on success, 1 on usage error).

    Fix: the empty-argument check now operates on the local ``argv``
    list; it previously inspected and mutated the global ``sys.argv``,
    which ignored an explicitly passed ``argv`` (e.g. from tests).
    """
    if argv is None:  # pragma: no cover
        argv = sys.argv[1:]
    if not argv:  # pragma: no cover
        argv.append('-h')

    args = parse_args(argv)
    if args.verbose:  # pragma: no cover
        log.set_debug_level(args.verbose)

    if args.action == 'scan':
        # Plugins are *always* rescanned with fresh settings, because
        # we want to get the latest updates.
        plugins = initialize.from_parser_builder(args.plugins)

        if args.string:
            line = args.string
            if isinstance(args.string, bool):
                # `--string` given without a value: read one line from stdin.
                line = sys.stdin.read().splitlines()[0]
            _scan_string(line, plugins)
        else:
            baseline_dict = _perform_scan(args, plugins)
            if args.import_filename:
                write_baseline_to_file(
                    filename=args.import_filename[0],
                    data=baseline_dict,
                )
            else:
                print(baseline.format_baseline_for_output(baseline_dict))

    elif args.action == 'audit':
        if not args.diff:
            audit.audit_baseline(args.filename[0])
            return 0

        # Diff mode needs exactly two baseline files to compare.
        if len(args.filename) != 2:
            print(
                'Must specify two files to compare!',
                file=sys.stderr,
            )
            return 1

        try:
            audit.compare_baselines(args.filename[0], args.filename[1])
        except audit.RedundantComparisonError:
            print(
                'No difference, because it\'s the same file!',
                file=sys.stderr,
            )

    return 0
def main(argv: Optional[List[str]] = None) -> int:
    """Pre-commit hook entry point.

    Scans the supplied files for secrets and compares them against an
    optional baseline.

    :return: 1 if new secrets were found (or argument parsing failed),
        3 if the baseline file was rewritten, 0 otherwise.
    """
    try:
        args = parse_args(argv)
    except ValueError:
        return 1

    if args.verbose:  # pragma: no cover
        log.set_debug_level(args.verbose)

    # Find all secrets in files to be committed.
    secrets = SecretsCollection()
    for path in args.filenames:
        secrets.scan_file(path)

    # With a baseline, only secrets not already recorded count as new.
    new_secrets = (secrets - args.baseline) if args.baseline else secrets
    if new_secrets:
        pretty_print_diagnostics(new_secrets)
        return 1

    if not args.baseline:
        return 0

    # Only attempt baseline modifications if we don't find any new secrets.
    if not should_update_baseline(
        args.baseline,
        scanned_results=secrets,
        filelist=args.filenames,
        baseline_version=args.baseline_version,
    ):
        return 0

    if args.baseline_version != VERSION:
        with open(args.baseline_filename) as f:
            old_baseline = json.loads(f.read())

        # Override the results, because this has been updated in `should_update_baseline`.
        old_baseline['results'] = args.baseline.json()
        args.baseline = baseline.upgrade(old_baseline)

    baseline.save_to_file(args.baseline, filename=args.baseline_filename)
    print(
        'The baseline file was updated.\n'
        'Probably to keep line numbers of secrets up-to-date.\n'
        'Please `git add {}`, thank you.\n\n'.format(
            args.baseline_filename),
    )
    return 3
def main(argv=None):
    """Command-line entry point: run a scan or audit a baseline.

    :param argv: argument list; defaults to ``sys.argv[1:]``.
    :return: shell exit code (always 0 on completion).

    Fixes: the empty-argument check now uses the local ``argv`` list
    instead of inspecting and mutating the global ``sys.argv`` (which
    ignored an explicitly passed ``argv``), and the ``--version``
    branch returns 0 instead of None so the exit-code contract is
    consistent.
    """
    if argv is None:  # pragma: no cover
        argv = sys.argv[1:]
    if not argv:  # pragma: no cover
        # No arguments: show help instead of doing nothing.
        argv.append('-h')

    args = parse_args(argv)
    if args.verbose:  # pragma: no cover
        log.set_debug_level(args.verbose)

    if args.version:  # pragma: no cover
        print(VERSION)
        return 0

    if args.scan:
        print(json.dumps(
            _perform_scan(args),
            indent=2,
            sort_keys=True,
        ),
        )
    elif args.audit:
        audit.audit_baseline(args.audit[0])

    return 0
def main(argv=None):
    """Command-line entry point: scan for secrets or audit a baseline.

    :param argv: argument list; defaults to ``sys.argv[1:]``.
    :return: shell exit code (always 0 on completion).

    Fix: the empty-argument check now operates on the local ``argv``
    list; it previously inspected and mutated the global ``sys.argv``,
    ignoring an explicitly passed ``argv``.
    """
    if argv is None:  # pragma: no cover
        argv = sys.argv[1:]
    if not argv:  # pragma: no cover
        argv.append('-h')

    args = parse_args(argv)
    if args.verbose:  # pragma: no cover
        log.set_debug_level(args.verbose)

    if args.action == 'scan':
        # Plugins are *always* rescanned with fresh settings, because
        # we want to get the latest updates.
        plugins = initialize.from_parser_builder(args.plugins)

        if args.string:
            line = args.string
            if isinstance(args.string, bool):
                # `--string` given without a value: read one line from stdin.
                line = sys.stdin.read().splitlines()[0]
            _scan_string(line, plugins)
        else:
            output = json.dumps(
                _perform_scan(args, plugins),
                indent=2,
                sort_keys=True,
                separators=(',', ': '),
            )
            if args.import_filename:
                _write_to_file(args.import_filename[0], output)
            else:
                print(output)

    elif args.action == 'audit':
        audit.audit_baseline(args.filename[0])

    return 0
def main(argv=None):
    """Command-line entry point: scan for secrets or audit a baseline.

    :param argv: argument list; defaults to ``sys.argv[1:]``.
    :return: shell exit code (always 0 on completion).

    Fix: the empty-argument check now operates on the local ``argv``
    list; it previously inspected and mutated the global ``sys.argv``,
    ignoring an explicitly passed ``argv``.
    """
    if argv is None:  # pragma: no cover
        argv = sys.argv[1:]
    if not argv:  # pragma: no cover
        argv.append('-h')

    args = parse_args(argv)
    if args.verbose:  # pragma: no cover
        log.set_debug_level(args.verbose)

    if args.action == 'scan':
        output = json.dumps(
            _perform_scan(args),
            indent=2,
            sort_keys=True,
        )
        if args.import_filename:
            _write_to_file(args.import_filename[0], output)
        else:
            print(output)

    elif args.action == 'audit':
        audit.audit_baseline(args.filename[0])

    return 0
def main(argv=None):
    """Command-line entry point: scan for secrets or audit baselines.

    :param argv: argument list; defaults to ``sys.argv[1:]``.
    :return: shell exit code (0 on success, 1 on usage error).

    Fixes: the default ``argv=sys.argv[1:]`` was evaluated once at
    import time (a stale/mutable-default bug), so it is replaced with
    the ``None`` sentinel idiom; the empty-argument check now uses the
    local ``argv`` list instead of mutating the global ``sys.argv``.
    """
    if argv is None:  # pragma: no cover
        argv = sys.argv[1:]
    if not argv:  # pragma: no cover
        argv.append('--help')

    args = parse_args(argv)
    if args.verbose:  # pragma: no cover
        log.set_debug_level(args.verbose)

    if args.action == 'scan':
        # Optional word-list filtering: precompile the Aho-Corasick
        # automaton once, and remember the list's hash for the baseline.
        automaton = None
        word_list_hash = None
        if args.word_list_file:
            automaton, word_list_hash = build_automaton(args.word_list_file)

        # Plugins are *always* rescanned with fresh settings, because
        # we want to get the latest updates.
        plugins = initialize.from_parser_builder(
            plugins_dict=args.plugins,
            custom_plugin_paths=args.custom_plugin_paths,
            exclude_lines_regex=args.exclude_lines,
            automaton=automaton,
            should_verify_secrets=not args.no_verify,
        )

        if args.string:
            line = args.string
            if isinstance(args.string, bool):
                # `--string` given without a value: read one line from stdin.
                line = sys.stdin.read().splitlines()[0]
            _scan_string(line, plugins)
        else:
            baseline_dict = _perform_scan(
                args,
                plugins,
                automaton,
                word_list_hash,
            )
            if args.import_filename:
                write_baseline_to_file(
                    filename=args.import_filename[0],
                    data=baseline_dict,
                )
            else:
                print(baseline.format_baseline_for_output(baseline_dict))

    elif args.action == 'audit':
        if not args.diff and not args.display_results:
            audit.audit_baseline(args.filename[0])
            return 0

        if args.display_results:
            audit.print_audit_results(args.filename[0])
            return 0

        # Diff mode needs exactly two baseline files to compare.
        if len(args.filename) != 2:
            print(
                'Must specify two files to compare!',
                file=sys.stderr,
            )
            return 1

        try:
            audit.compare_baselines(args.filename[0], args.filename[1])
        except audit.RedundantComparisonError:
            print(
                'No difference, because it\'s the same file!',
                file=sys.stderr,
            )

    return 0
def main(argv=None):
    """
    Expected Usage:
      1. Initialize TrackedRepos from config.yaml, and save to crontab.
      2. Each cron command will run and scan git diff from previous
         commit saved, to now.
      3. If something is found, alert.

    :param argv: argument list; defaults to ``sys.argv[1:]``.
    :return: shell error code

    Fix: the empty-argument check now operates on the local ``argv``
    list; it previously inspected and mutated the global ``sys.argv``,
    ignoring an explicitly passed ``argv``.
    """
    if argv is None:  # pragma: no cover
        argv = sys.argv[1:]
    if not argv:  # pragma: no cover
        argv.append('-h')

    args = parse_args(argv)
    if args.verbose:  # pragma: no cover
        log.set_debug_level(args.verbose)

    plugin_sensitivity = parse_sensitivity_values(args)
    repo_config = parse_repo_config(args)
    s3_config = parse_s3_config(args)

    if args.initialize:
        # initialize sets up the local file storage for tracking
        try:
            tracked_repos = initialize_repos_from_repo_yaml(
                args.initialize,
                plugin_sensitivity,
                repo_config,
                s3_config,
            )
        except IOError:
            # Error handled in initialize_repos_from_repo_yaml
            return 1

        # Only repos that persisted successfully get a cron entry.
        cron_repos = [repo for repo in tracked_repos if repo.save()]
        if not cron_repos:
            return 0

        print('# detect-secrets scanner')
        for repo in cron_repos:
            print('{} {}'.format(
                repo.cron(),
                args.output_hook_command,
            ))

    elif args.add_repo:
        add_repo(
            args.add_repo[0],
            plugin_sensitivity,
            is_local_repo=args.local,
            s3_config=s3_config,
            repo_config=repo_config,
        )

    elif args.scan_repo:
        repo_name = args.scan_repo[0]
        repo = tracked_repo_factory(args.local, bool(s3_config)) \
            .load_from_file(repo_name, repo_config, s3_config)
        if not repo:
            return 1

        secrets = repo.scan()
        if not secrets:
            return 1

        if len(secrets.data) > 0:
            log.error('SCAN COMPLETE - We found secrets in: %s', repo.name)

            secrets = secrets.json()
            set_authors_for_found_secrets(secrets, repo)

            alert = {
                'alert': 'Secrets found',
                'repo_name': repo.name,
                'secrets': secrets,
            }
            log.error(alert)
            args.output_hook.alert(repo.name, secrets)
        else:
            log.info('SCAN COMPLETE - STATUS: clean for %s', repo.name)

            # Save records, since the latest scan indicates that the
            # most recent commit is clean.
            repo.update()
            repo.save(OverrideLevel.ALWAYS)

    return 0