@responses.activate
def test_version_check_latest_version_request_fails():
    # Simulate the version endpoint being unreachable.
    responses.add(
        responses.GET,
        (
            'https://detect-secrets-client-version.s3.us-south.'
            'cloud-object-storage.appdomain.cloud/version'
        ),
        status=404,
    )

    with mock.patch('detect_secrets.util.sys.stderr', new=StringIO()) as fake_err:
        util.version_check()
        stderr = fake_err.getvalue().strip()

    expected_error_msg = 'Failed to check for newer version of detect-secrets.'
    assert expected_error_msg == uncolor(stderr)
@responses.activate
def test_version_check_not_out_of_date():
    # The endpoint reports the same version that is currently installed.
    responses.add(
        responses.GET,
        (
            'https://detect-secrets-client-version.s3.us-south.'
            'cloud-object-storage.appdomain.cloud/version'
        ),
        status=200,
        body=VERSION,
    )

    with mock.patch('detect_secrets.util.sys.stderr', new=StringIO()) as fake_err:
        util.version_check()
        stderr = fake_err.getvalue().strip()

    expected_error_msg = ''
    assert expected_error_msg == stderr
@responses.activate
def test_version_check_out_of_date():
    # The endpoint reports a far newer version than the one installed.
    responses.add(
        responses.GET,
        (
            'https://detect-secrets-client-version.s3.us-south.'
            'cloud-object-storage.appdomain.cloud/version'
        ),
        status=200,
        body='1000000.0.0+ibm.0',
    )

    with mock.patch('detect_secrets.util.sys.stderr', new=StringIO()) as fake_err:
        util.version_check()
        stderr = fake_err.getvalue().strip()

    expected_error_msg = (
        'WARNING: You are running an outdated version of detect-secrets.\n'
        + ' Your version: %s\n' % VERSION
        + ' Latest version: 1000000.0.0+ibm.0\n'
        + ' See upgrade guide at '
        'https://ibm.biz/detect-secrets-how-to-upgrade'
    )
    assert expected_error_msg == uncolor(stderr)
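# The three tests above pin down the observable behaviour of
# util.version_check(): silence when up to date, a plain failure notice when
# the version endpoint cannot be reached, and a multi-line upgrade warning
# when a newer release exists. The sketch below is illustrative only -- it is
# not the implementation in detect_secrets/util.py, and the use of `requests`
# and the exact control flow are assumptions.
def _version_check_sketch():
    import sys

    import requests

    url = (
        'https://detect-secrets-client-version.s3.us-south.'
        'cloud-object-storage.appdomain.cloud/version'
    )
    try:
        response = requests.get(url)
        response.raise_for_status()
    except requests.RequestException:
        print('Failed to check for newer version of detect-secrets.', file=sys.stderr)
        return

    latest_version = response.text.strip()
    if latest_version != VERSION:
        print(
            'WARNING: You are running an outdated version of detect-secrets.\n'
            + ' Your version: %s\n' % VERSION
            + ' Latest version: %s\n' % latest_version
            + ' See upgrade guide at https://ibm.biz/detect-secrets-how-to-upgrade',
            file=sys.stderr,
        )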
def main(argv=None):
    version_check()

    args = parse_args(argv)
    if args.verbose:  # pragma: no cover
        log.set_debug_level(args.verbose)

    try:
        # If a baseline is provided, make sure it is valid before doing
        # any further computation.
        baseline_collection = get_baseline(
            args.baseline[0],
            plugin_filenames=args.plugin_filenames,
        )
    except (IOError, TypeError, ValueError):
        # Error logs are handled within the called logic.
        return 1

    automaton = None
    word_list_hash = None
    if args.word_list_file:
        automaton, word_list_hash = build_automaton(args.word_list_file)

    plugins = initialize.from_parser_builder(
        args.plugins,
        exclude_lines_regex=args.exclude_lines,
        automaton=automaton,
        should_verify_secrets=not args.no_verify,
        plugin_filenames=args.plugin_filenames,
    )

    # Merge plugins from the baseline, if one was supplied.
    if baseline_collection:
        plugins = initialize.merge_plugins_from_baseline(
            baseline_collection.plugins,
            args,
            automaton,
        )
        baseline_collection.plugins = plugins

    results_collection = find_secrets_in_files(args, plugins)
    if baseline_collection:
        original_results_collection = results_collection
        results_collection = get_secrets_not_in_baseline(
            results_collection,
            baseline_collection,
        )

    if len(results_collection.data) > 0:
        pretty_print_diagnostics_for_new_secrets(results_collection)
        return 1

    # If no baseline has been supplied, there is nothing left to check.
    if not baseline_collection:
        return 0

    # Only attempt baseline modifications if we don't find any new secrets.
    baseline_modified = trim_baseline_of_removed_secrets(
        original_results_collection,
        baseline_collection,
        args.filenames,
    )

    if VERSION != baseline_collection.version:
        baseline_collection.version = VERSION
        baseline_modified = True

    if baseline_modified:
        write_baseline_to_file(
            filename=args.baseline[0],
            data=baseline_collection.format_for_baseline_output(),
        )

        log.error(
            'The baseline file was updated.\n'
            'Probably to keep line numbers of secrets up-to-date.\n'
            'Please `git add {}`, thank you.\n\n'.format(args.baseline[0]),
        )
        return 3

    # Check for secrets that are verified but have not yet been audited.
    verified_non_audited = get_verified_non_audited_secrets_from_baseline(
        baseline_collection,
    )
    if len(verified_non_audited.data) > 0:
        pretty_print_diagnostics_for_verified_non_audited(verified_non_audited)
        return 2

    # Check for non-audited secrets, if configured to fail on them.
    if args.fail_on_non_audited:
        non_audited = get_non_audited_secrets_from_baseline(
            baseline_collection,
        )
        if len(non_audited.data) > 0:
            pretty_print_diagnostics_for_non_audited(non_audited)
            return 4

    return 0
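# Illustrative only: the exit codes returned by main() above, summarised for
# callers such as a pre-commit hook or CI wrapper. The mapping is read
# directly from the return statements in main(); `run_hook` is a hypothetical
# helper, not part of detect-secrets.
def run_hook(argv=None):
    exit_code = main(argv)
    summaries = {
        0: 'No new secrets found; baseline (if any) is up to date.',
        1: 'New potential secrets found, or the baseline could not be read.',
        2: 'Baseline contains verified secrets that have not been audited.',
        3: 'Baseline file was updated; `git add` it and retry.',
        4: 'Baseline contains non-audited secrets (fail_on_non_audited is set).',
    }
    print(summaries.get(exit_code, 'Unexpected exit code: {}'.format(exit_code)))
    return exit_code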
def main(argv=None):
    version_check()

    if len(sys.argv) == 1:  # pragma: no cover
        sys.argv.append('-h')

    args = parse_args(argv)
    if args.verbose:  # pragma: no cover
        log.set_debug_level(args.verbose)

    if args.action == 'scan':
        automaton = None
        word_list_hash = None
        if args.word_list_file:
            automaton, word_list_hash = build_automaton(args.word_list_file)

        # Plugins are *always* rescanned with fresh settings, because
        # we want to get the latest updates.
        plugins = initialize.from_parser_builder(
            args.plugins,
            exclude_lines_regex=args.exclude_lines,
            automaton=automaton,
            should_verify_secrets=not args.no_verify,
            plugin_filenames=args.plugin_filenames,
        )

        if args.string:
            line = args.string
            if isinstance(args.string, bool):
                line = sys.stdin.read().splitlines()[0]

            _scan_string(line, plugins)
        else:
            baseline_dict = _perform_scan(
                args,
                plugins,
                automaton,
                word_list_hash,
            )

            if args.import_filename:
                write_baseline_to_file(
                    filename=args.import_filename[0],
                    data=baseline_dict,
                )
            else:
                print(
                    baseline.format_baseline_for_output(baseline_dict),
                )

    elif args.action == 'audit':
        if not args.diff and not args.display_results:
            audit.audit_baseline(args.filename[0])
            return 0

        if args.display_results:
            audit.print_audit_results(args.filename[0])
            return 0

        if len(args.filename) != 2:
            print(
                'Must specify two files to compare!',
                file=sys.stderr,
            )
            return 1

        try:
            audit.compare_baselines(args.filename[0], args.filename[1])
        except audit.RedundantComparisonError:
            print(
                'No difference, because it\'s the same file!',
                file=sys.stderr,
            )

    return 0
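# Illustrative only: driving the CLI entry point above programmatically. The
# subcommand and flag names mirror those referenced in main() (`scan`,
# `--string`); the string being scanned is a dummy placeholder, and the real
# module may already define its own __main__ guard.
if __name__ == '__main__':
    # Scan a single string instead of files; results are printed per plugin.
    sys.exit(main(['scan', '--string', 'dummy-value-to-scan']))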