def test_dispatch_multiple_rules(self):
    """A worker listed twice by the dispatcher is still only run once."""
    stoq = Stoq(base_dir=utils.get_data_dir(), dispatchers=['simple_dispatcher'])
    stoq.load_plugin('simple_dispatcher').WORKERS = ['simple_worker', 'simple_worker']
    worker = stoq.load_plugin('simple_worker')
    worker.scan = create_autospec(worker.scan, return_value=None)
    stoq.scan(self.generic_content)
    # Deduplication: exactly one call, with the usual (payload, request) args
    self.assertEqual(worker.scan.call_count, 1)
    self.assertEqual(len(worker.scan.call_args[0]), 2)
def test_connector_exception(self):
    """Expect a connector ``save()`` failure to raise out of ``scan()``.

    NOTE(review): another method named ``test_connector_exception`` —
    asserting the failure is *logged* rather than raised — appears later
    in this class. Python keeps only the last definition with a given
    name, so this test is silently shadowed and never collected or run.
    Confirm which behavior is current, then rename or remove one of the
    two duplicates.
    """
    s = Stoq(base_dir=utils.get_data_dir(), connectors=['dummy_connector'])
    dummy_connector = s.load_plugin('dummy_connector')
    dummy_connector.save = create_autospec(
        dummy_connector.save, side_effect=RuntimeError('Unexpected exception'))
    with self.assertRaises(Exception):
        s.scan(self.generic_content)
def test_dispatch_multiple_plugins2(self):
    """Two distinct workers selected by the dispatcher both receive the payload."""
    payload = b'again-multi-plugin-space-content'
    stoq = Stoq(base_dir=utils.get_data_dir(), dispatchers=['simple_dispatcher'])
    stoq.load_plugin('simple_dispatcher').WORKERS = ['simple_worker', 'dummy_worker']
    # Mock out both workers' scan() so we can inspect how they were called
    mocked = {}
    for plugin_name in ('simple_worker', 'dummy_worker'):
        plugin = stoq.load_plugin(plugin_name)
        plugin.scan = create_autospec(plugin.scan, return_value=None)
        mocked[plugin_name] = plugin
    stoq.scan(payload)
    for plugin in mocked.values():
        plugin.scan.assert_called_once()
        self.assertEqual(len(plugin.scan.call_args[0]), 2)
def test_connector_exception(self):
    """A connector ``save()`` failure is logged as an error, not raised."""
    stoq = Stoq(base_dir=utils.get_data_dir(), connectors=['dummy_connector'])
    connector = stoq.load_plugin('dummy_connector')
    connector.save = create_autospec(
        connector.save, side_effect=RuntimeError('Unexpected exception')
    )
    # Re-enable logging in case another test disabled it globally
    logging.disable(logging.NOTSET)
    with self.assertLogs(level='ERROR') as logged:
        stoq.scan(self.generic_content)
    expected_prefix = 'ERROR:stoq:Failed to save results using dummy_connector'
    self.assertTrue(logged.output[0].startswith(expected_prefix))
def test_archiver_in_results(self):
    """Archiver output is recorded under the archiver's name in the results."""
    stoq = Stoq(base_dir=utils.get_data_dir(), dest_archivers=['simple_archiver'])
    meta = RequestMeta(archive_payloads=True)
    resp = stoq.scan(self.generic_content, request_meta=meta)
    archiver_results = resp.results[0].archivers
    self.assertIn('simple_archiver', archiver_results)
    self.assertIn('file_save_id', archiver_results['simple_archiver'])
def test_start_dispatch(self):
    """``add_start_dispatch`` applies only to the root payload, not extracted ones."""
    stoq = Stoq(base_dir=utils.get_data_dir())
    resp = stoq.scan(self.generic_content, add_start_dispatch=['extract_random'])
    root_workers = resp.results[0].plugins_run['workers'][0]
    child_workers = resp.results[1].plugins_run['workers'][0]
    self.assertIn('extract_random', root_workers)
    self.assertNotIn('extract_random', child_workers)
def test_decorator(self):
    """A loaded decorator contributes its decoration to the response, error-free."""
    stoq = Stoq(base_dir=utils.get_data_dir(), decorators=['simple_decorator'])
    stoq.load_plugin('simple_decorator')
    resp = stoq.scan(self.generic_content)
    self.assertIn('simple_decorator', resp.decorators)
    self.assertIn('simple_decoration', resp.decorators['simple_decorator'])
    self.assertEqual(len(resp.errors), 0)
def test_decorator_exception(self):
    """An exception raised by a decorator is captured as a response error."""
    stoq = Stoq(base_dir=utils.get_data_dir(), decorators=['simple_decorator'])
    stoq.load_plugin('simple_decorator').RAISE_EXCEPTION = True
    resp = stoq.scan(self.generic_content)
    self.assertEqual(len(resp.errors), 1)
    self.assertIn('Test exception', resp.errors['simple_decorator'][0])
def test_deep_dispatch_nonexistent_plugin(self):
    """Deep-dispatching to an unknown plugin records an error and runs nothing."""
    stoq = Stoq(base_dir=utils.get_data_dir())
    resp = stoq.scan(
        self.generic_content,
        add_start_deep_dispatch=['this_plugin_doesnt_exist'],
    )
    workers_run = resp.results[0].plugins_run['workers'][0]
    self.assertNotIn('this_plugin_doesnt_exist', workers_run)
    self.assertEqual(len(resp.errors), 1)
def test_dont_dest_archive_yara(self):
    """A yara rule match can veto archiving of an otherwise-archivable payload."""
    stoq = Stoq(base_dir=utils.get_data_dir(), dest_archivers=['dummy_archiver'])
    resp = stoq.scan(
        self.generic_content, request_meta=RequestMeta(archive_payloads=True)
    )
    # The yara rule 'similar_simple_rule' should set save = False
    self.assertNotIn('dummy_archiver', resp.results[0].archivers)
def test_worker_in_results(self):
    """Worker output appears under the worker's name in the payload results."""
    stoq = Stoq(base_dir=utils.get_data_dir())
    resp = stoq.scan(self.generic_content, add_start_dispatch=['simple_worker'])
    worker_results = resp.results[0].workers[0]
    self.assertIn('simple_worker', worker_results)
    self.assertIn('valuable_insight', worker_results['simple_worker'])
def test_dispatch_from_worker(self):
    """A worker's ``DISPATCH_TO`` routes its extracted payloads onward."""
    stoq = Stoq(base_dir=utils.get_data_dir())
    stoq.load_plugin('simple_worker').DISPATCH_TO = ['extract_random']
    resp = stoq.scan(self.generic_content, add_start_dispatch=['simple_worker'])
    # Root payload hits simple_worker; its extraction hits extract_random,
    # which in turn extracts the third result.
    self.assertIn('simple_worker', resp.results[0].plugins_run['workers'][0])
    self.assertIn('extract_random', resp.results[1].plugins_run['workers'][0])
    self.assertEqual('extract_random', resp.results[2].extracted_by)
def test_dest_archiver_exception(self):
    """An archiver exception is recorded as an error instead of propagating."""
    stoq = Stoq(base_dir=utils.get_data_dir(), dest_archivers=['simple_archiver'])
    stoq.load_plugin('simple_archiver').RAISE_EXCEPTION = True
    resp = stoq.scan(self.generic_content)
    self.assertIn('simple_archiver', resp.results[0].plugins_run['archivers'])
    self.assertEqual(len(resp.errors), 1)
    self.assertIn('Test exception', resp.errors['simple_archiver'][0])
def test_always_dispatch(self):
    """``always_dispatch`` workers are loaded and run against every payload."""
    stoq = Stoq(base_dir=utils.get_data_dir(), always_dispatch=['simple_worker'])
    resp = stoq.scan(self.generic_content)
    self.assertIn('simple_worker', stoq._loaded_plugins)
    # Both the root payload and the extracted one pass through the worker
    for result in resp.results[:2]:
        self.assertIn('simple_worker', result.plugins_run['workers'][0])
def test_decorator_errors(self):
    """Errors returned (not raised) by a decorator are collected in the response."""
    stoq = Stoq(base_dir=utils.get_data_dir(), decorators=['simple_decorator'])
    stoq.load_plugin('simple_decorator').RETURN_ERRORS = True
    resp = stoq.scan(self.generic_content)
    self.assertIn('simple_decorator', resp.decorators)
    self.assertIn('simple_decoration', resp.decorators['simple_decorator'])
    self.assertEqual(len(resp.errors), 1)
    self.assertIn('Test error', resp.errors['simple_decorator'][0])
def test_worker_errors(self):
    """Errors returned by a worker are collected alongside its normal results."""
    stoq = Stoq(base_dir=utils.get_data_dir())
    stoq.load_plugin('simple_worker').RETURN_ERRORS = True
    resp = stoq.scan(self.generic_content, add_start_dispatch=['simple_worker'])
    self.assertIn('simple_worker', resp.results[0].plugins_run['workers'][0])
    self.assertIn('simple_worker', resp.results[0].workers[0])
    self.assertEqual(len(resp.errors), 1)
    self.assertIn('Test error', resp.errors['simple_worker'][0])
def test_dispatch(self):
    """The dispatcher routes the payload to its worker with dispatch metadata."""
    stoq = Stoq(base_dir=utils.get_data_dir(), dispatchers=['simple_dispatcher'])
    worker = stoq.load_plugin('dummy_worker')
    worker.scan = create_autospec(worker.scan, return_value=None)
    resp = stoq.scan(self.generic_content)
    positional_args = worker.scan.call_args[0]
    self.assertEqual(len(positional_args), 2)
    # First positional arg is the payload, carrying the dispatcher's metadata
    self.assertEqual(
        positional_args[0].dispatch_meta['simple_dispatcher'],
        {'test_key': 'Useful metadata info'},
    )
    self.assertIn('dummy_worker', resp.results[0].plugins_run['workers'][0])
def test_dest_archive(self):
    """With archiving requested, the destination archiver runs exactly once."""
    stoq = Stoq(base_dir=utils.get_data_dir(), dest_archivers=['dummy_archiver'])
    archiver = stoq.load_plugin('dummy_archiver')
    archiver.archive = create_autospec(archiver.archive, return_value=None)
    resp = stoq.scan(
        self.generic_content, request_meta=RequestMeta(archive_payloads=True)
    )
    archiver.archive.assert_called_once()
    self.assertIn('dummy_archiver', resp.results[0].plugins_run['archivers'])
def test_split_results(self):
    """``split()`` yields one response per worker, each excluding the other's output."""
    stoq = Stoq(base_dir=utils.get_data_dir())
    resp = stoq.scan(
        self.generic_content,
        add_start_dispatch=['multiclass_plugin', 'simple_worker'],
    )
    pieces = resp.split()
    self.assertEqual(len(pieces), 2)
    for piece in pieces:
        worker_results = piece['results'][0]['workers'][0]
        if 'simple_worker' in worker_results:
            self.assertNotIn('multiclass_plugin', worker_results)
        elif 'multiclass_plugin' in worker_results:
            self.assertNotIn('simple_worker', worker_results)
        else:
            raise Exception('required plugin not found in results')
def test_dont_dest_archive_request(self):
    """``archive_payloads=False`` on the request suppresses all archiving."""
    stoq = Stoq(base_dir=utils.get_data_dir(), dest_archivers=['dummy_archiver'])
    archiver = stoq.load_plugin('dummy_archiver')
    archiver.archive = Mock(return_value=None)
    resp = stoq.scan(
        self.generic_content,
        add_start_dispatch=['extract_random'],
        request_meta=RequestMeta(archive_payloads=False),
    )
    archiver.archive.assert_not_called()
    # Neither the root payload nor the extracted payload was archived
    for result in resp.results[:2]:
        self.assertNotIn('dummy_archiver', result.plugins_run['archivers'])
def test_dont_dest_archive_payload(self):
    """``should_archive=False`` on a payload skips only that payload's archiving."""
    stoq = Stoq(base_dir=utils.get_data_dir(), dest_archivers=['dummy_archiver'])
    archiver = stoq.load_plugin('dummy_archiver')
    archiver.archive = create_autospec(archiver.archive, return_value=None)
    resp = stoq.scan(
        self.generic_content,
        payload_meta=PayloadMeta(should_archive=False),
        add_start_dispatch=['extract_random'],
        request_meta=RequestMeta(archive_payloads=True),
    )
    # Only the extracted payload (results[1]) goes through the archiver
    archiver.archive.assert_called_once()
    self.assertNotIn('dummy_archiver', resp.results[0].plugins_run['archivers'])
    self.assertIn('dummy_archiver', resp.results[1].plugins_run['archivers'])
def test_multiclass_plugin(self):
    """A plugin acting as both dispatcher and worker is loaded in both roles."""
    stoq = Stoq(base_dir=utils.get_data_dir(), dispatchers=['multiclass_plugin'])
    plugin = stoq.load_plugin('multiclass_plugin')
    plugin.scan = create_autospec(plugin.scan, return_value=None)
    resp = stoq.scan(self.generic_content)
    call_args = plugin.scan.call_args[0]
    self.assertEqual(len(call_args), 2)
    # The payload carries the dispatcher-side metadata from the same plugin
    dispatch_meta = call_args[0].dispatch_meta['multiclass_plugin']
    self.assertEqual(
        dispatch_meta['multiclass_plugin']['rule0'], 'multiclass_plugin'
    )
    self.assertIn('multiclass_plugin', resp.results[0].plugins_run['workers'][0])
    self.assertIn('multiclass_plugin', stoq._loaded_dispatcher_plugins)
    self.assertIn('multiclass_plugin', stoq._loaded_plugins)
def test_scan(self):
    """A basic scan returns a single result sized to the payload."""
    stoq = Stoq(base_dir=utils.get_data_dir())
    resp = stoq.scan(self.generic_content)
    self.assertEqual(len(resp.results), 1)
    self.assertEqual(resp.results[0].size, len(self.generic_content))
def main() -> None:
    """stoQ command line entry point.

    Resolves the stoQ home directory, parses command line arguments, then
    executes one of the subcommands: ``scan``, ``run``, ``list``,
    ``install`` or ``test``. Exits with status 1 on an invalid
    ``$STOQ_HOME`` and status 2 when ``scan`` receives no content.
    """
    about = f'stoQ :: v{__version__} :: an automated analysis framework'
    # If $STOQ_HOME exists, set our base directory to that, otherwise
    # use $HOME/.stoq
    try:
        stoq_home = str(
            Path(os.getenv('STOQ_HOME', f'{str(Path.home())}/.stoq')).resolve(
                strict=True
            )
        )
    except FileNotFoundError as err:
        print(f"$STOQ_HOME is invalid, exiting: {err}", file=sys.stderr)
        sys.exit(1)
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=about,
        epilog='''
Examples:

    - Scan a file with installed plugins and dispatch rules:

        $ %(prog)s scan mybadfile.exe

    - Scan a file and force it to go through the yara plugin:

        $ %(prog)s scan mybadfile.exe -s yara

    - Ingest from PubSub, force all payloads through yara, trid, and exif, then save results to file:

        $ %(prog)s run -a yara trid exif -P pubsub -C file

    - Monitor a directory (specified in dirmon.stoq) for newly created files send them to workers, and archive all payloads into MongoDB:

        $ %(prog)s run -P dirmon -A mongodb

    - Install a plugin from a directory

        $ %(prog)s install path/to/plugin_directory

''',
    )
    subparsers = parser.add_subparsers(title='commands', dest='command')
    subparsers.required = True
    scan = subparsers.add_parser('scan', help='Scan a given payload')
    scan.add_argument(
        'file',
        nargs='?',
        type=argparse.FileType('rb'),
        default=sys.stdin.buffer,
        help='File to scan, can also be provided from stdin',
    )
    run = subparsers.add_parser(
        'run', help='Continually ingest and scan payloads from Provider plugins'
    )
    run.add_argument(
        '-P', '--providers', nargs='+', help='Provider plugins to ingest payloads from'
    )
    # Add shared arguments so they still show up in the help dialog
    for subparser in [scan, run]:
        subparser.add_argument(
            '-A',
            '--dest-archivers',
            nargs='+',
            help='Archiver plugins to send payloads to',
        )
        subparser.add_argument(
            '-S',
            '--source-archivers',
            nargs='+',
            help='Archiver plugins to read payload from',
        )
        subparser.add_argument(
            '-D',
            '--decorators',
            nargs='+',
            help='Decorator plugins to send results to before saving',
        )
        subparser.add_argument(
            '-C', '--connectors', nargs='+', help='Connector plugins to send results to'
        )
        subparser.add_argument(
            '-R',
            '--dispatchers',
            nargs='+',
            help='Dispatcher plugins to use send payloads to',
        )
        subparser.add_argument(
            '-a',
            '--always-dispatch',
            nargs='+',
            help='Worker plugins to always dispatch plugins to',
        )
        subparser.add_argument(
            '-s',
            '--start-dispatch',
            nargs='+',
            help='Worker plugins to add to the original payload dispatch',
        )
        subparser.add_argument(
            '--max-recursion',
            type=int,
            default=None,
            help='Maximum level of recursion into a payload and extracted payloads',
        )
        subparser.add_argument('--plugin-opts', nargs='+', help='Plugin options')
        subparser.add_argument(
            '--request-source',
            default=None,
            help='Source name to add to initial scan request',
        )
        subparser.add_argument(
            '--request-extra',
            nargs='+',
            help='Key/value pair to add to initial scan request metadata',
        )
        subparser.add_argument(
            '--plugin-dir', nargs='+', help='Directory(ies) containing stoQ plugins'
        )
        subparser.add_argument(
            '--config-file',
            default=f'{stoq_home}/stoq.cfg',
            help='Path to stoQ configuration file',
        )
        subparser.add_argument(
            '--log-level',
            default=None,
            # BUG FIX: this list previously read `'error' 'crtical'`;
            # implicit string concatenation collapsed the last two entries
            # into the single invalid choice 'errorcrtical', so neither
            # 'error' nor 'critical' was ever accepted.
            choices=['debug', 'info', 'warning', 'error', 'critical'],
            help='Log level for stoQ events',
        )
    plugin_list = subparsers.add_parser('list', help='List available plugins')
    plugin_list.add_argument(
        '--plugin-dir', nargs='+', help='Directory(ies) containing stoQ plugins'
    )
    install = subparsers.add_parser('install', help='Install a given plugin')
    install.add_argument(
        'plugin_path', help='Directory or Github repo of the plugin to install'
    )
    install.add_argument(
        '--install_dir',
        default=os.path.join(stoq_home, 'plugins'),
        help='Override the default plugin installation directory',
    )
    install.add_argument(
        '--upgrade',
        action='store_true',
        help='Force the plugin to be upgraded if it already exists',
    )
    install.add_argument(
        '--github', action='store_true', help='Install plugin from Github repository'
    )
    subparsers.add_parser('test', help='Run stoQ tests')
    args = parser.parse_args()
    # Parse `--plugin-opts plugin:opt=value` pairs into a nested dict;
    # not every subcommand defines the option, hence the AttributeError guard.
    plugin_opts: Union[Dict, None] = None
    try:
        if args.plugin_opts:
            plugin_opts = {}
            for arg in args.plugin_opts:
                plugin_name, plugin_option = arg.split(':', 1)
                opt, value = plugin_option.split('=', 1)
                if value.lower() == 'true':
                    value = True
                elif value.lower() == 'false':
                    value = False
                if plugin_name in plugin_opts:
                    plugin_opts[plugin_name].update({opt: value})
                else:
                    plugin_opts[plugin_name] = {opt: value}
    except AttributeError:
        pass
    except ValueError as err:
        print(f'Failed parsing plugin option: {err}')
    # Build the request metadata from --request-source / --request-extra
    request_meta = RequestMeta()
    try:
        if args.request_source:
            request_meta.source = args.request_source
        if args.request_extra:
            for arg in args.request_extra:
                extra_key, extra_value = arg.split('=', 1)
                if extra_value.lower() == 'true':
                    extra_value = True
                elif extra_value.lower() == 'false':
                    extra_value = False
                request_meta.extra_data[extra_key] = extra_value
    except AttributeError:
        pass
    except ValueError as err:
        print(f'Failed parsing request metadata option: {err}')
    try:
        if not os.path.isfile(args.config_file):
            print(f'Warning: {args.config_file} does not exist, using stoQ defaults!')
    except AttributeError:
        pass
    if args.command == 'scan':
        with args.file as f:
            # Verify that the file or stdin has some sort of data
            if not select.select([f], [], [], 0.0)[0]:
                print('Error: No content to scan was provided')
                sys.exit(2)
            content = f.read()
            if not content:
                print('Error: The provided content to scan was empty')
                sys.exit(2)
        if args.file.name == '<stdin>':
            filename = None
        else:
            path = args.file.name
            try:
                filename = os.path.basename(path.encode('utf-8'))
            except AttributeError:
                filename = os.path.basename(path)
        stoq = Stoq(
            base_dir=stoq_home,
            config_file=args.config_file,
            log_level=args.log_level,
            plugin_opts=plugin_opts,
            source_archivers=args.source_archivers,
            dest_archivers=args.dest_archivers,
            connectors=args.connectors,
            dispatchers=args.dispatchers,
            decorators=args.decorators,
            always_dispatch=args.always_dispatch,
            max_recursion=args.max_recursion,
            plugin_dir_list=args.plugin_dir,
        )
        response = asyncio.get_event_loop().run_until_complete(
            stoq.scan(
                content,
                PayloadMeta(extra_data={'filename': filename}),
                request_meta=request_meta,
                add_start_dispatch=args.start_dispatch,
            )
        )
        # Only print to stdout when no connector is handling the results
        if not args.connectors:
            print(response)
    elif args.command == 'run':
        stoq = Stoq(
            base_dir=stoq_home,
            config_file=args.config_file,
            log_level=args.log_level,
            plugin_opts=plugin_opts,
            providers=args.providers,
            source_archivers=args.source_archivers,
            dest_archivers=args.dest_archivers,
            connectors=args.connectors,
            dispatchers=args.dispatchers,
            decorators=args.decorators,
            always_dispatch=args.always_dispatch,
            max_recursion=args.max_recursion,
            plugin_dir_list=args.plugin_dir,
        )
        asyncio.get_event_loop().run_until_complete(
            stoq.run(request_meta=request_meta, add_start_dispatch=args.start_dispatch)
        )
    elif args.command == 'list':
        stoq = Stoq(base_dir=stoq_home, plugin_dir_list=args.plugin_dir)
        print(about)
        print('-' * len(about))
        for name, info in stoq.list_plugins().items():
            print(f'{name:<20s} v{info["version"]:<10s}{info["description"]}')
            print(f'\t\t\t\t- {", ".join(info["classes"]):<20s}')
    elif args.command == 'install':
        StoqPluginInstaller.install(
            args.plugin_path, args.install_dir, args.upgrade, args.github
        )
        print(f'Successfully installed {args.plugin_path} into {args.install_dir}')
    elif args.command == 'test':
        test_path = os.path.dirname(tests.__file__)
        test_suite = unittest.TestLoader().discover(test_path)
        unittest.TextTestRunner(verbosity=1).run(test_suite)
def test_worker_not_in_results(self):
    """A worker that produces no output is omitted from the results."""
    stoq = Stoq(base_dir=utils.get_data_dir())
    resp = stoq.scan(self.generic_content, add_start_dispatch=['dummy_worker'])
    self.assertNotIn('dummy_worker', resp.results[0].workers)
def test_connector(self):
    """A configured connector has its ``save()`` invoked exactly once."""
    stoq = Stoq(base_dir=utils.get_data_dir(), connectors=['dummy_connector'])
    connector = stoq.load_plugin('dummy_connector')
    connector.save = create_autospec(connector.save)
    stoq.scan(self.generic_content)
    connector.save.assert_called_once()
def test_dedup(self):
    """Identical extracted payloads are deduplicated into a single result."""
    # The simple_worker plugin always extracts the same payload
    stoq = Stoq(base_dir=utils.get_data_dir(), always_dispatch=['simple_worker'])
    resp = stoq.scan(self.generic_content)
    self.assertEqual(len(resp.results), 2)
def test_max_recursion(self):
    """Payload extraction stops at the configured maximum recursion depth."""
    max_rec_depth = 4  # defined in stoq.cfg
    stoq = Stoq(base_dir=utils.get_data_dir(), always_dispatch=['extract_random'])
    resp = stoq.scan(self.generic_content)
    # Root payload plus one extraction per permitted recursion level
    self.assertEqual(len(resp.results), max_rec_depth + 1)
def test_archiver_not_in_results(self):
    """An archiver that returns nothing leaves no entry in the results."""
    stoq = Stoq(base_dir=utils.get_data_dir(), dest_archivers=['dummy_archiver'])
    resp = stoq.scan(
        self.generic_content, request_meta=RequestMeta(archive_payloads=True)
    )
    self.assertNotIn('dummy_archiver', resp.results[0].archivers)