def _perform_scan(args):
    """Scan the repository for secrets and return a baseline dict.

    Merges results into any pre-existing baseline named by
    ``args.import_filename``.

    :param args: output of `argparse.ArgumentParser.parse_args`
    :rtype: dict
    """
    old_baseline = _get_existing_baseline(args.import_filename)

    # Plugins are *always* rescanned with fresh settings, because
    # we want to get the latest updates.
    plugins = initialize.from_parser_builder(args.plugins)

    # Favors --exclude argument over existing baseline's regex (if exists)
    if args.exclude:
        args.exclude = args.exclude[0]
    elif old_baseline and old_baseline.get('exclude_regex'):
        args.exclude = old_baseline['exclude_regex']

    # If we have knowledge of an existing baseline file, we should use
    # that knowledge and *not* scan that file.
    # Fix: previously the baseline file was only excluded when an exclude
    # regex already existed, so with no --exclude (and no stored regex)
    # the tool would scan its own baseline file. Exclude unconditionally.
    if args.import_filename:
        payload = r'^{}$'.format(args.import_filename[0])
        if args.exclude:
            args.exclude += r'|{}'.format(payload)
        else:
            args.exclude = payload

    new_baseline = baseline.initialize(
        plugins,
        args.exclude,
        args.path,
        args.all_files,
    ).format_for_baseline_output()

    if old_baseline:
        new_baseline = baseline.merge_baseline(
            old_baseline,
            new_baseline,
        )

    return new_baseline
def _perform_scan(args, plugins):
    """Run a secrets scan with the given plugins and return a baseline dict.

    :param args: output of `argparse.ArgumentParser.parse_args`
    :param plugins: initialized plugins to scan with
    :rtype: dict
    """
    old_baseline = _get_existing_baseline(args.import_filename)

    # The CLI --exclude flag wins; otherwise fall back to the regex
    # stored in the existing baseline, when there is one.
    if args.exclude:
        args.exclude = args.exclude[0]
    elif old_baseline and old_baseline.get('exclude_regex'):
        args.exclude = old_baseline['exclude_regex']

    # Never scan the baseline file itself: fold an anchored pattern for
    # it into the exclude regex (skipping a duplicate append).
    if args.import_filename:
        payload = '^{}$'.format(args.import_filename[0])
        if not args.exclude:
            args.exclude = payload
        elif payload not in args.exclude:
            args.exclude += r'|{}'.format(payload)

    new_baseline = baseline.initialize(
        plugins,
        args.exclude,
        args.path,
        args.all_files,
    ).format_for_baseline_output()

    if old_baseline:
        new_baseline = baseline.merge_baseline(old_baseline, new_baseline)

    return new_baseline
def main(argv=None):
    """CLI entry point: parse arguments, run a scan, print the baseline."""
    if len(sys.argv) == 1:  # pragma: no cover
        # Invoked with no arguments at all: show the help text.
        sys.argv.append('-h')

    args = parse_args(argv)

    if args.verbose:  # pragma: no cover
        CustomLog.enableDebug(args.verbose)

    plugins = initialize_plugins(args.plugins)

    if args.scan:
        if args.exclude:
            args.exclude = args.exclude[0]

        output = baseline.initialize(
            plugins,
            args.exclude,
            args.scan,
        ).format_for_baseline_output()
        print(json.dumps(output, indent=2, sort_keys=True))

    return 0
def _perform_scan(args):
    """Scan for secrets and return a baseline dict, merging any prior one.

    :param args: output of `argparse.ArgumentParser.parse_args`
    :rtype: dict
    """
    old_baseline = _get_existing_baseline(args)

    # Plugins are *always* rescanned with fresh settings, because
    # we want to get the latest updates.
    plugins = initialize.from_parser_builder(args.plugins)

    # Favors --exclude argument over existing baseline's regex (if exists)
    if args.exclude:
        args.exclude = args.exclude[0]
    elif old_baseline and old_baseline.get('exclude_regex'):
        args.exclude = old_baseline['exclude_regex']

    results = baseline.initialize(
        plugins,
        args.exclude,
        args.scan,
    ).format_for_baseline_output()

    if old_baseline:
        results = baseline.merge_baseline(old_baseline, results)

    return results
def scan(argv):
    """Parse CLI args, build plugins, and run a baseline scan."""
    args = parse_args(argv)

    # A word list (when supplied) yields an automaton used to ignore
    # known words, plus a hash that gets recorded in the baseline.
    automaton, word_list_hash = (
        build_automaton(args.word_list_file)
        if args.word_list_file
        else (None, None)
    )

    # Plugins are *always* rescanned with fresh settings, because
    # we want to get the latest updates.
    plugins = initialize.from_parser_builder(
        plugins_dict=args.plugins,
        custom_plugin_paths=args.custom_plugin_paths,
        exclude_lines_regex=args.exclude_lines,
        automaton=automaton,
        should_verify_secrets=not args.no_verify,
    )

    return baseline.initialize(
        path=args.path,
        plugins=plugins,
        custom_plugin_paths=args.custom_plugin_paths,
        exclude_files_regex=args.exclude_files,
        exclude_lines_regex=args.exclude_lines,
        word_list_file=args.word_list_file,
        word_list_hash=word_list_hash,
        should_scan_all_files=args.all_files,
    )
def _perform_scan(args):
    """Run a fresh secrets scan, merged with any pre-existing baseline.

    :param args: output of `argparse.ArgumentParser.parse_args`
    :rtype: dict
    """
    old_baseline = _get_existing_baseline(args)

    # Plugins are *always* rescanned with fresh settings, because
    # we want to get the latest updates.
    plugins = initialize.from_parser_builder(args.plugins)

    # An explicit --exclude takes precedence; otherwise reuse the regex
    # recorded in the old baseline, when present.
    if args.exclude:
        args.exclude = args.exclude[0]
    elif old_baseline and old_baseline.get('exclude_regex'):
        args.exclude = old_baseline['exclude_regex']

    new_baseline = baseline.initialize(
        plugins,
        args.exclude,
        args.scan,
    ).format_for_baseline_output()

    if not old_baseline:
        return new_baseline
    return baseline.merge_baseline(old_baseline, new_baseline)
def _perform_scan(args, plugins, automaton, word_list_hash):
    """
    :param args: output of `argparse.ArgumentParser.parse_args`
    :param plugins: tuple of initialized plugins
    :type automaton: ahocorasick.Automaton|None
    :param automaton: optional automaton for ignoring certain words.
    :type word_list_hash: str|None
    :param word_list_hash: optional iterated sha1 hash of the words in
        the word list.
    :rtype: dict
    """
    old_baseline = _get_existing_baseline(args.import_filename)
    if old_baseline:
        # Rebuild the plugin set so settings stored in the baseline are
        # honored alongside any fresh CLI overrides.
        plugins = initialize.merge_plugins_from_baseline(
            _get_plugins_from_baseline(old_baseline),
            args,
            automaton=automaton,
        )

        # Favors CLI arguments over existing baseline configuration
        if not args.exclude_files:
            args.exclude_files = _get_exclude_files(old_baseline)
        if not args.exclude_lines and old_baseline.get('exclude'):
            args.exclude_lines = old_baseline['exclude']['lines']
        if not args.word_list_file and old_baseline.get('word_list'):
            args.word_list_file = old_baseline['word_list']['file']
        if not args.custom_plugin_paths and old_baseline.get('custom_plugin_paths'):
            args.custom_plugin_paths = old_baseline['custom_plugin_paths']

    # If we have knowledge of an existing baseline file, we should use
    # that knowledge and add it to our exclude_files regex.
    if args.import_filename:
        _add_baseline_to_exclude_files(args)

    new_baseline = baseline.initialize(
        path=args.path,
        plugins=plugins,
        custom_plugin_paths=args.custom_plugin_paths,
        exclude_files_regex=args.exclude_files,
        exclude_lines_regex=args.exclude_lines,
        word_list_file=args.word_list_file,
        word_list_hash=word_list_hash,
        should_scan_all_files=args.all_files,
    ).format_for_baseline_output()

    if old_baseline:
        new_baseline = baseline.merge_baseline(old_baseline, new_baseline)

    return new_baseline
def get_results(
    self,
    path=None,
    exclude_files_regex=None,
    scan_all_files=False,
):
    """Run a baseline scan over *path* and return the JSON results.

    :param path: list of paths to scan; defaults to the test data dir.
    :param exclude_files_regex: optional regex of filenames to skip.
    :param scan_all_files: whether to scan every file, not just tracked ones.
    """
    # Fix: the original used a mutable list literal as the default
    # argument value; use a None sentinel and build the list per call.
    if path is None:
        path = ['./test_data/files']
    return baseline.initialize(
        path,
        self.plugins,
        exclude_files_regex=exclude_files_regex,
        should_scan_all_files=scan_all_files,
    ).json()
def get_results(
    self,
    rootdir='./test_data/files',
    exclude_regex=None,
    scan_all_files=False,
):
    """Scan *rootdir* with the configured plugins and return JSON results."""
    secrets = baseline.initialize(
        self.plugins,
        rootdir=rootdir,
        exclude_regex=exclude_regex,
        scan_all_files=scan_all_files,
    )
    return secrets.json()
def _perform_scan(args, plugins):
    """
    :param args: output of `argparse.ArgumentParser.parse_args`
    :param plugins: tuple of initialized plugins
    :rtype: dict
    """
    old_baseline = _get_existing_baseline(args.import_filename)
    if old_baseline:
        # Re-derive the plugin from the baseline so stored settings win
        # unless overridden on the CLI.
        plugins = initialize.merge_plugin_from_baseline(
            _get_plugin_from_baseline(old_baseline),
            args,
        )

        # Favors `--exclude-files` and `--exclude-lines` CLI arguments
        # over existing baseline's regexes (if given)
        if not args.exclude_files:
            args.exclude_files = _get_exclude_files(old_baseline)
        if not args.exclude_lines and old_baseline.get('exclude'):
            args.exclude_lines = old_baseline['exclude']['lines']

    # If we have knowledge of an existing baseline file, we should use
    # that knowledge and add it to our exclude_files regex.
    if args.import_filename:
        _add_baseline_to_exclude_files(args)

    new_baseline = baseline.initialize(
        plugins=plugins,
        exclude_files_regex=args.exclude_files,
        exclude_lines_regex=args.exclude_lines,
        path=args.path,
        scan_all_files=args.all_files,
    ).format_for_baseline_output()

    if old_baseline:
        new_baseline = baseline.merge_baseline(old_baseline, new_baseline)

    return new_baseline
def get_results(self, rootdir='./test_data/files', exclude_regex=None):
    """Scan *rootdir* with this fixture's plugins and return JSON results."""
    secrets = baseline.initialize(
        self.plugins,
        rootdir=rootdir,
        exclude_regex=exclude_regex,
    )
    return secrets.json()
def get_results(
    self,
    rootdir='./test_data/files',
    exclude_regex=None,
):
    """Run a baseline scan under *rootdir* and return the JSON output."""
    return baseline.initialize(
        self.plugins,
        rootdir=rootdir,
        exclude_regex=exclude_regex,
    ).json()