def test_precedence_with_baseline(parser):
    """A --disable-plugin flag removes a plugin even when the baseline lists it."""
    serialized_baseline = json.dumps({
        'version': '0.0.1',
        'plugins_used': [
            {
                'name': 'Base64HighEntropyString',
                'base64_limit': 3,
            },
            {
                'name': 'AWSKeyDetector',
            },
        ],
        'results': [],
    })

    with tempfile.NamedTemporaryFile() as temp_baseline:
        temp_baseline.write(serialized_baseline.encode())
        temp_baseline.seek(0)

        parser.parse_args([
            '--baseline', temp_baseline.name,
            '--disable-plugin', 'Base64HighEntropyString',
        ])

        # Only the plugin that was *not* disabled should survive.
        assert len(get_settings().plugins) == 1
        assert 'AWSKeyDetector' in get_settings().plugins
def setup_settings():
    """Install a fixed two-plugin configuration for the surrounding tests."""
    plugin_configs = [
        {
            'name': 'Base64HighEntropyString',
            'limit': 3,
        },
        {
            'name': 'PrivateKeyDetector',
        },
    ]
    get_settings().configure_plugins(plugin_configs)
def configure_plugins():
    """Configure the two entropy plugins with known, explicit limits."""
    entropy_plugins = [
        {
            'name': 'Base64HighEntropyString',
            'limit': 4.5,
        },
        {
            'name': 'HexHighEntropyString',
            'limit': 3,
        },
    ]
    get_settings().configure_plugins(entropy_plugins)
def test_success(parser):
    """Every plugin named via --disable-plugin is dropped from settings."""
    parsed = parser.parse_args([
        '--disable-plugin', 'Base64HighEntropyString',
        '--disable-plugin', 'AWSKeyDetector',
    ])

    # The argparse namespace collects the disabled names as a set.
    assert parsed.disable_plugin == {'AWSKeyDetector', 'Base64HighEntropyString'}

    # Neither disabled plugin remains registered...
    assert 'AWSKeyDetector' not in get_settings().plugins
    assert 'Base64HighEntropyString' not in get_settings().plugins

    # ...but the other plugins are left intact.
    assert get_settings().plugins
def test_file_based_success_yaml():
    """Scanning a YAML file reports secrets at the expected line numbers."""
    get_settings().configure_plugins([
        {
            'name': 'HexHighEntropyString',
            'limit': 3.0,
        },
    ])

    collection = SecretsCollection()
    collection.scan_file('test_data/config.yaml')

    # The second line of each secret's string form is its location.
    locations = [str(found).splitlines()[1] for _, found in collection]
    assert locations == [
        'Location: test_data/config.yaml:3',
        'Location: test_data/config.yaml:5',
    ]
def test_module_success(parser):
    """A --filter module path is added on top of the (emptied) filter set."""
    config = {
        # Remove all filters, so we can test adding things back in.
        'filters_used': [],
    }
    module_path = 'detect_secrets.filters.heuristic.is_sequential_string'

    with transient_settings(config):
        default_filters = set(get_settings().filters.keys())
    assert module_path not in default_filters

    with transient_settings(config):
        parser.parse_args(['scan', '--filter', module_path])
        assert module_path in get_settings().filters
def test_filename_filters_are_invoked_first():
    """Path-based exclusion filters suppress results when scanning a diff."""
    get_settings().configure_filters([
        {
            'path': 'detect_secrets.filters.regex.should_exclude_file',
            'pattern': [
                'test|baseline',
            ],
        },
    ])

    collection = SecretsCollection()
    with open('test_data/sample.diff') as diff_file:
        collection.scan_diff(diff_file.read())

    # Every file in the sample diff matches the exclusion pattern.
    assert len(collection.files) == 0
def test_should_exclude_file(parser):
    """--exclude-files patterns gate filenames and serialize into settings."""
    parser.parse_args([
        '--exclude-files', '^tests/.*',
        '--exclude-files', '.*/i18/.*',
    ])

    # Table of (filename, expected exclusion) pairs.
    cases = [
        ('tests/blah.py', True),
        ('detect_secrets/tests/blah.py', False),
        ('app/messages/i18/en.properties', True),
        ('app/i18secrets/secrets.yaml', False),
    ]
    for filename, expected in cases:
        assert filters.regex.should_exclude_file(filename) is expected

    serialized = [
        entry
        for entry in get_settings().json()['filters_used']
        if entry['path'] == 'detect_secrets.filters.regex.should_exclude_file'
    ]
    assert serialized == [
        {
            'path': 'detect_secrets.filters.regex.should_exclude_file',
            'pattern': [
                '^tests/.*',
                '.*/i18/.*',
            ],
        },
    ]
def test_should_exclude_secret(parser):
    """--exclude-secrets patterns gate secret values and serialize into settings."""
    parser.parse_args([
        '--exclude-secrets', '^[Pp]assword[0-9]{0,3}$',
        '--exclude-secrets', 'my-first-password',
    ])

    # Table of (secret value, expected exclusion) pairs.
    cases = [
        ('Password123', True),
        ('MyRealPassword', False),
        ('1-my-first-password-for-database', True),
        ('my-password', False),
    ]
    for secret_value, expected in cases:
        assert filters.regex.should_exclude_secret(secret_value) is expected

    serialized = [
        entry
        for entry in get_settings().json()['filters_used']
        if entry['path'] == 'detect_secrets.filters.regex.should_exclude_secret'
    ]
    assert serialized == [
        {
            'path': 'detect_secrets.filters.regex.should_exclude_secret',
            'pattern': [
                '^[Pp]assword[0-9]{0,3}$',
                'my-first-password',
            ],
        },
    ]
def test_should_exclude_line(parser):
    """--exclude-lines patterns gate whole lines and serialize into settings.

    Bug fix: the original asserted that the *identical* input string
    'password = "******"' was both excluded (True) and not excluded (False) --
    an impossible pair, apparently caused by the example secrets being redacted
    to asterisks. The inputs below are reconstructed so that the first contains
    'canarytoken' (matches the first pattern) and the second matches neither
    pattern.
    """
    parser.parse_args([
        '--exclude-lines', 'canarytoken',
        '--exclude-lines', '^not-real-secret = .*$',
    ])

    # First pattern: any line containing 'canarytoken' is excluded.
    assert filters.regex.should_exclude_line(
        'password = "canarytoken-abc123"') is True
    assert filters.regex.should_exclude_line('password = "hunter2"') is False

    # Second pattern is anchored, so only exact-prefix lines match.
    assert filters.regex.should_exclude_line('not-real-secret = value') is True
    assert filters.regex.should_exclude_line(
        'maybe-not-real-secret = value') is False

    assert [
        item
        for item in get_settings().json()['filters_used']
        if item['path'] == 'detect_secrets.filters.regex.should_exclude_line'
    ] == [
        {
            'path': 'detect_secrets.filters.regex.should_exclude_line',
            'pattern': [
                'canarytoken',
                '^not-real-secret = .*$',
            ],
        },
    ]
def test_success(parser):
    """A custom plugin's file path survives a baseline save/load round-trip."""
    # Ensure it serializes accordingly.
    parser.parse_args(['-p', 'testing/plugins.py'])

    with tempfile.NamedTemporaryFile() as temp_file:
        baseline.save_to_file(SecretsCollection(), temp_file.name)
        temp_file.seek(0)

        # Wipe all runtime state, so that the only way the custom plugin can
        # reappear is through the baseline we just wrote.
        get_settings().clear()
        plugins.util.get_mapping_from_secret_type_to_class.cache_clear()
        assert 'HippoDetector' not in get_settings().plugins

        parser.parse_args(['--baseline', temp_file.name])
        assert get_settings().plugins['HippoDetector'] == {
            'path': f'file://{os.path.abspath("testing/plugins.py")}',
        }
        assert plugins.initialize.from_plugin_classname('HippoDetector')
def test_file_based_success_config():
    """Scanning an INI file reports secrets at the expected line numbers."""
    get_settings().configure_plugins([
        {
            'name': 'Base64HighEntropyString',
            'limit': 3.0,
        },
    ])

    collection = SecretsCollection()
    collection.scan_file('test_data/config.ini')

    # The second line of each secret's string form is its location.
    locations = [str(found).splitlines()[1] for _, found in collection]
    assert locations == [
        'Location: test_data/config.ini:2',
        'Location: test_data/config.ini:10',
        'Location: test_data/config.ini:21',
        'Location: test_data/config.ini:22',
        'Location: test_data/config.ini:32',
    ]
def test_default_plugins_initialized(parser):
    """A scan registers every known plugin, honouring explicit limit flags."""
    parser.parse_args(['scan', '--hex-limit', '2'])

    # All default plugins are present...
    expected_count = len(get_mapping_from_secret_type_to_class())
    assert len(get_settings().plugins) == expected_count

    # ...with the explicit --hex-limit applied, and defaults elsewhere.
    hex_plugin = plugins.initialize.from_plugin_classname('HexHighEntropyString')
    assert hex_plugin.entropy_limit == 2

    base64_plugin = plugins.initialize.from_plugin_classname('Base64HighEntropyString')
    assert base64_plugin.entropy_limit == 4.5
def clear_cache():
    """Reset every known cache and setting, to prevent cross-test pollution."""
    # This is also probably too aggressive, but test pollution is tough to
    # debug. So let's just trade off slightly longer test runs for shorter
    # developer time to debug test pollution issues.
    get_mapping_from_secret_type_to_class.cache_clear()
    settings.get_settings().clear()
    settings.cache_bust()

    # This is probably too aggressive, but it saves us from remembering to do
    # this every time we add a filter: clear every cached callable on every
    # public filter module.
    public_filter_modules = (
        getattr(filters, attr_name)
        for attr_name in dir(filters)
        if not attr_name.startswith('_')
    )
    for filter_module in public_filter_modules:
        for member_name in dir(filter_module):
            try:
                getattr(filter_module, member_name).cache_clear()
            except AttributeError:
                # Not a cached callable; skip it.
                pass
def test_success(parser):
    """Plugins listed in a baseline are loaded, with limit keys normalized."""
    # NOTE: renamed from `baseline` to avoid shadowing the `baseline` module.
    baseline_dict = {
        'version': '0.0.1',
        'plugins_used': [
            {
                'name': 'AWSKeyDetector',
            },
            {
                'base64_limit': 3,
                'name': 'Base64HighEntropyString',
            },
        ],
        'results': [],
    }

    with _mock_file(json.dumps(baseline_dict)) as filename:
        parser.parse_args(['--baseline', filename])

    assert len(get_settings().plugins) == 2
    assert 'AWSKeyDetector' in get_settings().plugins
    # The baseline's `base64_limit` key is normalized to `limit`.
    assert get_settings().plugins['Base64HighEntropyString'] == {'limit': 3}
def test_line_based_success():
    """With id-string and EXAMPLE-key noise filtered, exactly one secret remains."""
    # Explicitly configure filters, so that additions to filters won't affect this test.
    get_settings().configure_filters([
        # This will remove the `id` string
        {
            'path': 'detect_secrets.filters.heuristic.is_likely_id_string',
        },
        # This gets rid of the aws keys with `EXAMPLE` in them.
        {
            'path': 'detect_secrets.filters.regex.should_exclude_line',
            'pattern': [
                'EXAMPLE',
            ],
        },
    ])

    collection = SecretsCollection()
    collection.scan_file('test_data/each_secret.py')

    found = collection['test_data/each_secret.py']
    surviving_secret = next(iter(found))
    assert surviving_secret.secret_value.startswith('c2VjcmV0IG1lc')
    assert len(found) == 1
def test_success():
    """Wordlist exclusion honours case-insensitivity and min_length."""
    # case-insensitivity
    assert filters.wordlist.should_exclude_secret('testPass') is True

    # min_length requirement
    assert filters.wordlist.should_exclude_secret('2short') is False

    expected_config = {
        'min_length': 8,
        # Manually computed with `sha1sum test_data/word_list.txt`
        'file_hash': '116598304e5b33667e651025bcfed6b9a99484c7',
        'file_name': 'test_data/word_list.txt',
    }
    filter_path = 'detect_secrets.filters.wordlist.should_exclude_secret'
    assert get_settings().filters[filter_path] == expected_config
def test_only_verified_overrides_baseline_settings(parser):
    """--only-verified raises the baseline's verification policy to VERIFIED_TRUE."""
    filter_path = 'detect_secrets.filters.common.is_ignored_due_to_verification_policies'
    baseline_settings = {
        'filters_used': [{
            'path': filter_path,
            'min_level': VerifiedResult.UNVERIFIED.value,
        }],
    }

    empty_collection = SecretsCollection()
    with tempfile.NamedTemporaryFile() as temp_file, transient_settings(baseline_settings):
        baseline.save_to_file(empty_collection, temp_file.name)
        temp_file.seek(0)

        parser.parse_args(['scan', '--baseline', temp_file.name, '--only-verified'])

        min_level = get_settings().filters[filter_path]['min_level']
        assert min_level == VerifiedResult.VERIFIED_TRUE.value
def test_no_verify_overrides_baseline_settings(parser):
    """--no-verify removes the baseline's verification-policy filter entirely."""
    baseline_settings = {
        'filters_used': [{
            'path': 'detect_secrets.filters.common.is_ignored_due_to_verification_policies',
            'min_level': VerifiedResult.UNVERIFIED.value,
        }],
    }

    empty_collection = SecretsCollection()
    with tempfile.NamedTemporaryFile() as temp_file, transient_settings(baseline_settings):
        baseline.save_to_file(empty_collection, temp_file.name)
        temp_file.seek(0)

        parser.parse_args(['scan', '--baseline', temp_file.name, '--no-verify'])

        # No remaining filter path may end in the verification-policy name.
        for filter_path in get_settings().filters:
            assert filter_path.rsplit('.')[-1] != 'is_ignored_due_to_verification_policies'
def test_force_use_all_plugins(parser):
    """--force-use-all-plugins loads every plugin despite a sparse baseline."""
    serialized_baseline = json.dumps({
        'version': '0.0.1',
        'plugins_used': [
            {
                'name': 'AWSKeyDetector',
            },
        ],
        'results': [],
    })

    with tempfile.NamedTemporaryFile() as temp_baseline:
        temp_baseline.write(serialized_baseline.encode())
        temp_baseline.seek(0)

        parser.parse_args(['scan', '--force-use-all-plugins', '--baseline', temp_baseline.name])

        # The full default plugin set is registered, not just the baseline's one.
        assert len(get_settings().plugins) == len(get_mapping_from_secret_type_to_class())
def test_precedence_with_baseline_and_explicit_value(parser):
    """An explicit --base64-limit flag overrides the limit stored in the baseline."""
    serialized_baseline = json.dumps({
        'version': '0.0.1',
        'plugins_used': [
            {
                'name': 'Base64HighEntropyString',
                'base64_limit': 3,
            },
        ],
        'results': [],
    })

    with tempfile.NamedTemporaryFile() as temp_baseline:
        temp_baseline.write(serialized_baseline.encode())
        temp_baseline.seek(0)

        parser.parse_args(['--baseline', temp_baseline.name, '--base64-limit', '5'])

        # The command-line value (5) wins over the baseline's 3.
        assert get_settings().plugins['Base64HighEntropyString'] == {'limit': 5}
def test_success(parser):
    """--base64-limit is parsed as a float and stored on the plugin config."""
    parser.parse_args(['--base64-limit', '5'])

    configured_limit = get_settings().plugins['Base64HighEntropyString']['limit']
    assert configured_limit == 5.0
def test_baseline_optional(parser):
    """Without --baseline, the full default plugin set is still initialized."""
    parser.parse_args([])

    expected_count = len(get_mapping_from_secret_type_to_class())
    assert len(get_settings().plugins) == expected_count