async def test_scan_xor_single_value(self) -> None:
    s = Stoq(plugin_dir_list=[self.plugin_dir])
    plugin = s.load_plugin(self.plugin_name)
    payload = Payload(self._xor_encode(self.generic_data, self.xorkeys))
    dispatch_meta = {'test': {'test': {'meta': {'xorkey': self.xorkeys}}}}
    payload.dispatch_meta = dispatch_meta
    response = await plugin.scan(payload, RequestMeta())
    self.assertIsInstance(response, WorkerResponse)
    self.assertEqual(self.generic_data, response.extracted[0].content)
    self.assertEqual(
        self.xorkeys, response.extracted[0].payload_meta.extra_data['xorkey']
    )
def test_payloadresults_to_str(self):
    payload = Payload(self.generic_content)
    response = PayloadResults.from_payload(payload)
    response_str = str(response)
    response_dict = json.loads(response_str)
    self.assertIsInstance(response_str, str)
    self.assertIsInstance(response_dict, dict)
def ingest(self, queue: Queue) -> None:
    if self.RAISE_EXCEPTION:
        raise RuntimeError('Test exception, please ignore')
    if self.RETURN_PAYLOAD:
        queue.put(Payload(b'Important stuff'))
    else:
        queue.put({"simple_archiver": {"task": "This is a task from provider"}})
async def test_scan(self) -> None:
    s = Stoq(plugin_dir_list=[self.plugin_dir])
    plugin = s.load_plugin(self.plugin_name)
    payload = Payload(self.generic_data)
    response = await plugin.scan(payload, Request())
    self.assertIsInstance(response, WorkerResponse)
    self.assertEqual('text/plain', response.results['mimetype'])
def test_dispatcher(self) -> None:
    s = Stoq(
        plugin_dir_list=[self.plugin_dir],
        plugin_opts={
            self.plugin_name: {
                'dispatch_rules': f'{self.data_dir}/dispatch_rules.yar'
            }
        },
    )
    plugin = s.load_plugin(self.plugin_name)
    payload = Payload(self.generic_data)
    response = plugin.get_dispatches(payload, RequestMeta())
    self.assertIsInstance(response, DispatcherResponse)
    self.assertIn('test_dispatch_plugin', response.plugin_names)
    self.assertEqual(
        'test_dispatch_rule', response.meta['test_dispatch_plugin']['rule']
    )
    self.assertIn(
        'test_dispatch_plugin',
        response.meta['test_dispatch_plugin']['meta']['plugin'],
    )
    self.assertIn('True', response.meta['test_dispatch_plugin']['meta']['save'])
    self.assertEqual(
        ['tag1', 'tag2'], response.meta['test_dispatch_plugin']['tags']
    )
async def test_scan_notnil(self) -> None:
    s = Stoq(plugin_dir_list=[self.plugin_dir])
    plugin = s.load_plugin(self.plugin_name)
    payload = Payload(self.entropy_not_nil)
    response = await plugin.scan(payload, RequestMeta())
    self.assertIsInstance(response, WorkerResponse)
    self.assertEqual(1.584962500721156, response.results['entropy'])
def test_scan(self) -> None:
    s = Stoq(plugin_dir_list=[self.plugin_dir])
    plugin = s.load_plugin(self.plugin_name)
    payload = Payload(self.generic_data)
    response = plugin.scan(payload, RequestMeta())
    self.assertIsInstance(response, WorkerResponse)
    self.assertEqual('3:hMCE7pr3Kn:huJ6', response.results['ssdeep'])
async def test_scan(self) -> None:
    s = Stoq(plugin_dir_list=[str(self.plugin_dir)])
    plugin = s.load_plugin(self.plugin_name)
    payload = Payload(base64.b64encode(self.generic_data))
    response = await plugin.scan(payload, RequestMeta())
    self.assertIsInstance(response, WorkerResponse)
    self.assertEqual(1, len(response.extracted))
    self.assertEqual(self.generic_data, response.extracted[0].content)
def get(self, task: ArchiverResponse) -> Payload:
    """
    Retrieve archived payload from disk

    """
    path = os.path.abspath(task.results['path'])
    meta = PayloadMeta(extra_data=task.results)
    with open(path, 'rb') as f:
        return Payload(f.read(), meta)
def on_created(self, event):
    meta = PayloadMeta(
        extra_data={
            'filename': os.path.basename(event.src_path),
            'source_dir': os.path.dirname(event.src_path),
        }
    )
    with open(event.src_path, "rb") as f:
        self.queue.put(Payload(f.read(), meta))
def test_scan(self) -> None:
    s = Stoq(plugin_dir_list=[self.plugin_dir])
    plugin = s.load_plugin(self.plugin_name)
    with open(f'{self.data_dir}/sample.pdf', 'rb') as f:
        payload = Payload(f.read())
    response = plugin.scan(payload, RequestMeta())
    self.assertIsInstance(response, WorkerResponse)
    self.assertIn('FileType', response.results)
    self.assertEqual('PDF', response.results['FileType'])
    self.assertEqual(6, response.results['PageCount'])
async def get(self, task: ArchiverResponse) -> Payload:
    """
    Retrieve archived payload from disk

    """
    path = Path(task.results['path']).resolve()
    meta = PayloadMeta(extra_data=task.results)
    self.log.debug(f'got task: {task}, path: {path}, meta: {meta}')
    with open(path, 'rb') as f:
        return Payload(f.read(), meta)
def test_scan(self) -> None:
    s = Stoq(plugin_dir_list=[self.plugin_dir])
    plugin = s.load_plugin(self.plugin_name)
    with open(f'{self.data_dir}/TestJavaClass.class', 'rb') as f:
        payload = Payload(f.read())
    response = plugin.scan(payload, RequestMeta())
    self.assertIsInstance(response, WorkerResponse)
    self.assertIn('TestJavaClass', response.results['provided'])
    self.assertGreaterEqual(len(response.results['provided']), 4)
    self.assertGreaterEqual(len(response.results['required']), 2)
    self.assertGreaterEqual(len(response.results['constants']), 10)
async def test_scan(self) -> None:
    s = Stoq(plugin_dir_list=[self.plugin_dir])
    plugin = s.load_plugin(self.plugin_name)
    xord = bytes(x ^ 92 for x in self.generic_data)
    payload = Payload(xord)
    response = await plugin.scan(payload, Request())
    self.assertIsInstance(response, WorkerResponse)
    self.assertIn('0x5C', response.results)
    self.assertEqual(
        'AdjustTokenPrivileges CurrentVersion', response.results['0x5C'][0]['match']
    )
    self.assertEqual('CurrentVersion', response.results['0x5C'][1]['match'])
def _queue(self, path: str, queue: Queue) -> None:
    """
    Publish payload to stoQ queue

    """
    meta = PayloadMeta(
        extra_data={
            'filename': os.path.basename(path),
            'source_dir': os.path.dirname(path),
        }
    )
    with open(path, "rb") as f:
        queue.put(Payload(f.read(), meta))
def test_scan(self) -> None:
    s = Stoq(
        plugin_dir_list=[self.plugin_dir],
        plugin_opts={
            self.plugin_name: {'worker_rules': f'{self.data_dir}/scan_rules.yar'}
        },
    )
    plugin = s.load_plugin(self.plugin_name)
    payload = Payload(self.generic_data)
    response = plugin.scan(payload, RequestMeta())
    self.assertIsInstance(response, WorkerResponse)
    self.assertEqual('test_scan_rule', response.results['matches'][0]['rule'])
def test_dispatcher_save_false(self) -> None:
    s = Stoq(
        plugin_dir_list=[self.plugin_dir],
        plugin_opts={
            self.plugin_name: {
                'dispatch_rules': f'{self.data_dir}/dispatch_rules.yar'
            }
        },
    )
    plugin = s.load_plugin(self.plugin_name)
    payload = Payload(b'save_false')
    response = plugin.get_dispatches(payload, RequestMeta())
    self.assertIsInstance(response, DispatcherResponse)
    self.assertIn('False', response.meta['save_false']['meta']['save'])
def test_scan(self) -> None:
    s = Stoq(plugin_dir_list=[self.plugin_dir])
    plugin = s.load_plugin(self.plugin_name)
    payload = Payload(self.generic_data)
    response = plugin.scan(payload, RequestMeta())
    self.assertIsInstance(response, WorkerResponse)
    self.assertEqual('cfe671457bc475ef2f51cf12b1457475', response.results['md5'])
    self.assertEqual(
        'f610f70b1464d97f7897fefd6420ffc904df5e4f', response.results['sha1']
    )
    self.assertEqual(
        '2fa284e62b11fea1226b35cdd726a7a56090853ed135240665ceb3939f631af7',
        response.results['sha256'],
    )
async def test_dispatcher_create_xorkey(self) -> None:
    s = Stoq(
        plugin_dir_list=[self.plugin_dir],
        plugin_opts={
            self.plugin_name: {
                'dispatch_rules': f'{self.data_dir}/dispatch_rules.yar'
            }
        },
    )
    plugin = s.load_plugin(self.plugin_name)
    payload = Payload(b'This program_A}|f5egzrgtx')
    response = await plugin.get_dispatches(payload, Request())
    self.assertIsInstance(response, DispatcherResponse)
    self.assertEqual(
        21, int(response.meta['xordecode']['meta'].get('xorkey', 'None'))
    )
async def _queue(self, path: Path, queue: Queue) -> None:
    """
    Publish payload to stoQ queue

    """
    if path.is_file() and not path.name.startswith('.'):
        meta = PayloadMeta(
            extra_data={
                'filename': str(path.name),
                'source_dir': str(path.parent),
            }
        )
        with open(path, "rb") as f:
            await queue.put(Payload(f.read(), meta))
    else:
        self.log.debug(f'Skipping {path}, does not exist or is invalid')
async def test_scan_strings_limit(self) -> None:
    s = Stoq(
        plugin_dir_list=[self.plugin_dir],
        plugin_opts={
            self.plugin_name: {
                'worker_rules': f'{self.data_dir}/scan_rules.yar',
                'strings_limit': 5,
            }
        },
    )
    plugin = s.load_plugin(self.plugin_name)
    payload = Payload(self.generic_data * 10)
    response = await plugin.scan(payload, Request())
    self.assertIsInstance(response, WorkerResponse)
    self.assertEqual('test_scan_rule', response.results['matches'][0]['rule'])
    self.assertEqual(5, len(response.results['matches'][0]['strings']))
async def test_scan_async(self) -> None:
    s = Stoq(
        plugin_dir_list=[self.plugin_dir],
        plugin_opts={
            self.plugin_name: {'worker_rules': f'{self.data_dir}/scan_rules.yar'}
        },
    )
    plugin = s.load_plugin(self.plugin_name)
    payload = Payload(self.large_data)
    tasks = [plugin.scan(payload, Request()) for _ in range(10)]
    results = await asyncio.gather(*tasks)
    for result in results:
        self.assertIsInstance(result, WorkerResponse)
        self.assertEqual('test_scan_rule', result.results['matches'][0]['rule'])
async def test_dispatcher_create_xor_info(self) -> None:
    s = Stoq(
        plugin_dir_list=[self.plugin_dir],
        plugin_opts={
            self.plugin_name: {
                'dispatch_rules': f'{self.data_dir}/dispatch_rules.yar',
                'xor_first_match': False,
            },
        },
    )
    plugin = s.load_plugin(self.plugin_name)
    payload = Payload(b'This program_A}|f5egzrgtx Exxc1`c\x7fvbp}`p.')
    response = await plugin.get_dispatches(payload, Request())
    self.assertIsInstance(response, DispatcherResponse)
    self.assertListEqual(
        [(13, '$this_prog', b'\x15'), (26, '$this_prog_2b', b'\x11\x10')],
        response.meta['xordecode']['meta'].get('xor_info', '[]'),
    )
def test_scan_meta_bytes(self) -> None:
    s = Stoq(
        plugin_dir_list=[self.plugin_dir],
        plugin_opts={
            self.plugin_name: {'worker_rules': f'{self.data_dir}/scan_rules.yar'}
        },
    )
    plugin = s.load_plugin(self.plugin_name)
    payload = Payload(b'meta_bytes')
    response = plugin.scan(payload, RequestMeta())
    self.assertIsInstance(response, WorkerResponse)
    self.assertEqual(
        'test_scan_metadata_bytes', response.results['matches'][0]['rule']
    )
    self.assertEqual('ANeato', response.results['matches'][0]['meta']['bytes'])
    self.assertEqual(
        'Peter Rabbit', response.results['matches'][0]['meta']['author']
    )
    self.assertEqual('save_false', response.results['matches'][0]['meta']['plugin'])
async def ingest(self, queue: Queue) -> None:
    """
    Monitor a directory for newly created files for ingest

    """
    self.log.info(f'Monitoring {self.source_dir} for newly created files...')
    async for changes in awatch(self.source_dir):
        for change in list(changes):
            event = change[0]
            src_path = os.path.abspath(change[1])
            # Only handle Change.added
            if event != 1:
                continue
            meta = PayloadMeta(
                extra_data={
                    'filename': os.path.basename(src_path),
                    'source_dir': os.path.dirname(src_path),
                }
            )
            with open(src_path, 'rb') as f:
                payload = Payload(f.read(), meta)
            await queue.put(payload)
def ingest(self, queue: Queue) -> None:
    if self.RAISE_EXCEPTION:
        raise RuntimeError('Test exception, please ignore')
    queue.put(Payload(b'Important stuff'))
def test_scan_invalid_payload(self) -> None:
    s = Stoq(plugin_dir_list=[self.plugin_dir])
    plugin = s.load_plugin(self.plugin_name)
    payload = Payload(b'definitely not a javaclass payload')
    with self.assertRaises(StoqPluginException):
        response = plugin.scan(payload, RequestMeta())
async def ingest(self, queue: Queue) -> None:
    for time_slice in self._generate_dates(self.time_since):
        params = {'apikey': self.apikey, 'package': time_slice}
        response = requests.get(self.API_URL, params=params)
        for line in self._decompress(response.content):
            await queue.put(Payload(line))