def test_mailbox_whitelisted(self):
    # make sure that we do not process whitelisted emails
    root = create_root_analysis(alert_type='mailbox')
    root.initialize_storage()
    root.details = { 'hello': 'world' }
    shutil.copy(os.path.join('test_data', 'emails', 'splunk_logging.email.rfc822'),
                os.path.join(root.storage_dir, 'email.rfc822'))
    file_observable = root.add_observable(F_FILE, 'email.rfc822')
    file_observable.add_directive(DIRECTIVE_ORIGINAL_EMAIL)
    file_observable.mark_as_whitelisted()
    root.save()
    root.schedule()

    engine = TestEngine()
    engine.enable_module('analysis_module_file_type', 'test_groups')
    engine.enable_module('analysis_module_email_analyzer', 'test_groups')
    engine.enable_module('analysis_module_mailbox_email_analyzer', 'test_groups')
    engine.controlled_stop()
    engine.start()
    engine.wait()

    root = RootAnalysis(storage_dir=root.storage_dir)
    root.load()

    # we should still have our old details
    self.assertTrue('hello' in root.details)
    # and we should NOT have the email details merged in since the email is whitelisted
    self.assertFalse('email' in root.details)
    # and we should be whitelisted at this point
    self.assertTrue(root.whitelisted)
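# NOTE: the tests in this section share a common lifecycle, sketched here for
# reference (every name below comes from the tests themselves):
#
#   root = create_root_analysis(...)          # build a RootAnalysis on disk
#   root.initialize_storage()
#   ...add observables / directives / details...
#   root.save()
#   root.schedule()                           # queue it for the engine
#
#   engine = TestEngine(...)
#   engine.enable_module(...)                 # opt in only the modules under test
#   engine.controlled_stop()                  # stop once the workload drains
#   engine.start()
#   engine.wait()
#
#   root = RootAnalysis(storage_dir=...)      # reload from disk
#   root.load()
#   ...assert on the resulting analysis...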
def test_mailbox(self):
    import saq.modules.email

    root = create_root_analysis(alert_type='mailbox')
    root.initialize_storage()
    root.details = { 'hello': 'world' }
    shutil.copy(os.path.join('test_data', 'emails', 'splunk_logging.email.rfc822'),
                os.path.join(root.storage_dir, 'email.rfc822'))
    file_observable = root.add_observable(F_FILE, 'email.rfc822')
    file_observable.add_directive(DIRECTIVE_ORIGINAL_EMAIL)
    root.save()
    root.schedule()

    engine = TestEngine()
    engine.enable_module('analysis_module_file_type', 'test_groups')
    engine.enable_module('analysis_module_email_analyzer', 'test_groups')
    engine.enable_module('analysis_module_mailbox_email_analyzer', 'test_groups')
    engine.controlled_stop()
    engine.start()
    engine.wait()

    root = RootAnalysis(storage_dir=root.storage_dir)
    root.load()

    # we should still have our old details
    self.assertTrue('hello' in root.details)
    # merged in with our email analysis
    self.assertTrue('email' in root.details)
    self.assertIsNotNone(root.details['email'])
    self.assertTrue(root.description.startswith(saq.modules.email.MAILBOX_ALERT_PREFIX))
def test_bro_http_analyzer(self):
    saq.CONFIG['analysis_mode_http']['cleanup'] = 'no'

    root = create_root_analysis(alert_type=ANALYSIS_TYPE_BRO_HTTP, analysis_mode=ANALYSIS_MODE_HTTP)
    root.initialize_storage()
    root.details = {}

    for file_name in [ 'CZZiJd1zicZKNMMrV1.0.ready',
                       'CZZiJd1zicZKNMMrV1.0.reply',
                       'CZZiJd1zicZKNMMrV1.0.reply.entity',
                       'CZZiJd1zicZKNMMrV1.0.request' ]:
        source_path = os.path.join('test_data', 'http_streams', file_name)
        dest_path = os.path.join(root.storage_dir, file_name)
        shutil.copy(source_path, dest_path)
        root.add_observable(F_FILE, file_name)

    root.save()
    root.schedule()

    engine = TestEngine(analysis_pools={ANALYSIS_MODE_HTTP: 1},
                        local_analysis_modes=[ANALYSIS_MODE_HTTP])
    engine.enable_module('analysis_module_bro_http_analyzer', ANALYSIS_MODE_HTTP)
    engine.controlled_stop()
    engine.start()
    engine.wait()

    root = RootAnalysis(storage_dir=root.storage_dir)
    root.load()
    self.verify(root)
def test_detection(self, db, c):
    root = create_root_analysis(uuid=str(uuid.uuid4()))
    root.initialize_storage()
    observable = root.add_observable(F_TEST, 'test_7')
    root.save()
    root.schedule()

    engine = TestEngine(local_analysis_modes=['test_groups', ANALYSIS_MODE_CORRELATION])
    engine.enable_alerting()
    engine.enable_module('analysis_module_basic_test')
    engine.controlled_stop()
    engine.start()
    engine.wait()

    # analysis will have moved over to the data dir now
    root = RootAnalysis(storage_dir=storage_dir_from_uuid(root.uuid))
    root.load()

    # make sure we detected the change in modes
    self.assertTrue(log_count('analysis mode for RootAnalysis({}) changed from test_groups to correlation'.format(root.uuid)) > 0)
    self.assertEquals(log_count('completed analysis RootAnalysis({})'.format(root.uuid)), 2)
def test_intel_analysis(self):
    if not saq.CONFIG['sip'].getboolean('enabled'):
        return

    root = create_root_analysis(analysis_mode=ANALYSIS_MODE_CORRELATION)
    root.initialize_storage()
    i = root.add_observable(F_INDICATOR, 'sip:{}'.format(self.test_indicator_id))
    self.assertIsNotNone(i)
    root.save()
    root.schedule()

    engine = TestEngine(local_analysis_modes=[ANALYSIS_MODE_CORRELATION])
    engine.enable_module('analysis_module_intel_analyzer', ANALYSIS_MODE_CORRELATION)
    engine.controlled_stop()
    engine.start()
    engine.wait()

    root = RootAnalysis(storage_dir=root.storage_dir)
    root.load()
    i = root.get_observable(i.id)
    self.assertIsNotNone(i)

    from saq.modules.intel import IntelAnalysis
    analysis = i.get_analysis(IntelAnalysis)
    self.assertIsNotNone(analysis)

    # what we get here should be the same as what we got when we inserted it
    self.assertEquals(analysis.details, self.test_indicator)
def test_submit_alert(self):
    # disable cleanup for analysis mode 'analysis'
    saq.CONFIG['analysis_mode_analysis']['cleanup'] = 'no'

    self.start_api_server()

    root = create_root_analysis(analysis_mode=ANALYSIS_MODE_ANALYSIS)
    root.initialize_storage()
    url = root.add_observable(F_URL, TEST_URL)
    url.add_directive(DIRECTIVE_CRAWL)
    root.save()
    root.schedule()

    engine = TestEngine(analysis_pools={ANALYSIS_MODE_ANALYSIS: 1, ANALYSIS_MODE_CLOUDPHISH: 1},
                        local_analysis_modes=[ANALYSIS_MODE_ANALYSIS, ANALYSIS_MODE_CLOUDPHISH])
    engine.enable_module('analysis_module_cloudphish', ANALYSIS_MODE_ANALYSIS)
    engine.enable_module('analysis_module_cloudphish_request_analyzer', ANALYSIS_MODE_CLOUDPHISH)
    engine.enable_module('analysis_module_crawlphish', ANALYSIS_MODE_CLOUDPHISH)
    engine.enable_module('analysis_module_forced_detection', ANALYSIS_MODE_CLOUDPHISH)
    engine.enable_module('analysis_module_detection', ANALYSIS_MODE_CLOUDPHISH)
    engine.start()

    # should see the cloudphish module complete
    wait_for_log_count('analysis CloudphishAnalysis is completed', 1, 10)

    engine.controlled_stop()
    engine.wait()

    # check the results
    root = RootAnalysis(storage_dir=storage_dir_from_uuid(root.uuid))
    root.load()
    url = root.get_observable(url.id)
    self.assertIsNotNone(url)

    # this url should now have 3 analysis objects attached to it
    # (cloudphish, crawlphish and forced detection)
    self.assertEquals(len(url.analysis), 3)

    from saq.modules.cloudphish import CloudphishAnalysis
    cloudphish_analysis = url.get_analysis(CloudphishAnalysis)
    self.assertIsNotNone(cloudphish_analysis)
    self.assertEquals(cloudphish_analysis.analysis_result, SCAN_RESULT_ALERT)

    from saq.modules.url import CrawlphishAnalysisV2
    crawlphish_analysis = url.get_analysis(CrawlphishAnalysisV2)
    self.assertIsNotNone(crawlphish_analysis)
def test_detection(self, db, c):
    root = create_root_analysis(uuid=str(uuid.uuid4()))
    root.initialize_storage()
    observable = root.add_observable(F_TEST, 'test_7')
    root.save()
    root.schedule()

    engine = TestEngine(local_analysis_modes=[
        'test_groups',
        saq.CONFIG['analysis_module_detection']['target_mode']])
    engine.enable_module('analysis_module_basic_test')
    engine.enable_module('analysis_module_detection', 'test_groups')
    engine.controlled_stop()
    engine.start()
    engine.wait()

    root = RootAnalysis(storage_dir=root.storage_dir)
    root.load()

    # the analysis mode should have changed
    self.assertEquals(root.analysis_mode, saq.CONFIG['analysis_module_detection']['target_mode'])

    # make sure we detected the change in modes
    self.assertTrue(log_count('analysis mode for RootAnalysis({}) changed from test_groups to correlation'.format(root.uuid)) > 0)
    self.assertEquals(log_count('completed analysis RootAnalysis({})'.format(root.uuid)), 2)
def test_detections_000_ole(self):
    submissions = {}  # key = storage_dir, value = path to file

    for file_name in os.listdir(OFFICE_SAMPLES):
        source_path = os.path.join(OFFICE_SAMPLES, file_name)
        root = create_root_analysis(uuid=str(uuid.uuid4()))
        root.initialize_storage()
        shutil.copy(source_path, root.storage_dir)
        root.add_observable(F_FILE, file_name)
        root.save()
        root.schedule()
        submissions[root.storage_dir] = source_path

    engine = TestEngine()
    engine.enable_module('analysis_module_archive')
    engine.enable_module('analysis_module_file_type')
    engine.enable_module('analysis_module_olevba_v1_1')
    engine.enable_module('analysis_module_officeparser_v1_0')
    engine.enable_module('analysis_module_yara_scanner_v3_4')
    engine.controlled_stop()
    engine.start()
    engine.wait()

    for storage_dir in submissions:
        with self.subTest(storage_dir=storage_dir, source_path=submissions[storage_dir]):
            root = RootAnalysis()
            root.storage_dir = storage_dir
            root.load()
            detections = root.all_detection_points
            self.assertGreater(len(detections), 0)
def test_mhtml_analysis(self):
    root = create_root_analysis(analysis_mode='test_groups')
    root.initialize_storage()
    shutil.copy(os.path.join('test_data', 'mhtml', 'Invoice_PDF.mht'), root.storage_dir)
    file_observable = root.add_observable(F_FILE, 'Invoice_PDF.mht')
    root.save()
    root.schedule()

    engine = TestEngine(pool_size_limit=1)
    engine.enable_alerting()
    engine.enable_module('analysis_module_mhtml', 'test_groups')
    engine.controlled_stop()
    engine.start()
    engine.wait()

    root = RootAnalysis(storage_dir=root.storage_dir)
    root.load()
    file_observable = root.get_observable(file_observable.id)
    self.assertIsNotNone(file_observable)

    from saq.modules.file_analysis import MHTMLAnalysis
    analysis = file_observable.get_analysis(MHTMLAnalysis)
    self.assertIsNotNone(analysis)

    # should have extracted a single file
    self.assertEquals(len(analysis.details), 1)
    self.assertEquals(len(analysis.get_observables_by_type(F_FILE)), 1)
def test_no_detection(self, db, c):
    root = create_root_analysis(uuid=str(uuid.uuid4()), analysis_mode='test_groups')
    root.initialize_storage()
    observable = root.add_observable(F_TEST, 'test_1')
    root.save()
    root.schedule()

    engine = TestEngine()
    engine.enable_alerting()
    engine.enable_module('analysis_module_basic_test')
    engine.controlled_stop()
    engine.start()
    engine.wait()

    root = RootAnalysis(storage_dir=root.storage_dir)
    root.load()

    # the analysis mode should be the same
    self.assertEquals(root.analysis_mode, 'test_groups')

    # make sure we did NOT detect a change in modes
    self.assertEquals(log_count('analysis mode for RootAnalysis({}) changed from test_groups to correlation'.format(root.uuid)), 0)
    self.assertEquals(log_count('completed analysis RootAnalysis({})'.format(root.uuid)), 1)
def test_no_mailbox(self):
    # make sure that when we analyze emails in non-mailbox analysis we don't treat it
    # like it came from mailbox
    root = create_root_analysis(alert_type='not-mailbox') # <-- different alert_type
    root.initialize_storage()
    root.details = { 'hello': 'world' }
    shutil.copy(os.path.join('test_data', 'emails', 'splunk_logging.email.rfc822'),
                os.path.join(root.storage_dir, 'email.rfc822'))
    file_observable = root.add_observable(F_FILE, 'email.rfc822')
    file_observable.add_directive(DIRECTIVE_ORIGINAL_EMAIL)
    root.save()
    root.schedule()

    engine = TestEngine()
    engine.enable_module('analysis_module_file_type', 'test_groups')
    engine.enable_module('analysis_module_email_analyzer', 'test_groups')
    engine.enable_module('analysis_module_mailbox_email_analyzer', 'test_groups')
    engine.controlled_stop()
    engine.start()
    engine.wait()

    root = RootAnalysis(storage_dir=root.storage_dir)
    root.load()

    # we should still have our old details
    self.assertTrue('hello' in root.details)
    # and we should NOT have the email details merged in since it's not a mailbox analysis
    self.assertFalse('email' in root.details)
def test_faqueue_alert(self):
    if not integration_enabled('crits'):
        self.skipTest("crits integration not enabled")

    root = create_root_analysis(analysis_mode=ANALYSIS_MODE_CORRELATION, alert_type=ANALYSIS_TYPE_FAQUEUE)
    root.initialize_storage()
    root.details = {'indicator': {'crits_id': '5c3c9e42ad951d6254d20f98'}}
    root.save()
    root.schedule()

    engine = TestEngine(local_analysis_modes=[ANALYSIS_MODE_CORRELATION])
    engine.enable_module('analysis_module_faqueue_alert_analyzer', ANALYSIS_MODE_CORRELATION)
    engine.controlled_stop()
    engine.start()
    engine.wait()

    root = RootAnalysis(storage_dir=root.storage_dir)
    root.load()
    ALERT(root)
    set_dispositions([root.uuid], DISPOSITION_FALSE_POSITIVE, UNITTEST_USER_ID)

    engine = TestEngine(local_analysis_modes=[ANALYSIS_MODE_CORRELATION, ANALYSIS_MODE_DISPOSITIONED])
    engine.enable_module('analysis_module_faqueue_alert_analyzer',
                         [ANALYSIS_MODE_CORRELATION, ANALYSIS_MODE_DISPOSITIONED])
    engine.controlled_stop()
    engine.start()
    engine.wait()

    self.assertEquals(log_count('updating crits_id 5c3c9e42ad951d6254d20f98 to status Informational'), 1)

    # change the disposition to anything except FALSE POSITIVE and the indicator becomes ANALYZED
    set_dispositions([root.uuid], DISPOSITION_WEAPONIZATION, UNITTEST_USER_ID)

    engine = TestEngine(local_analysis_modes=[ANALYSIS_MODE_CORRELATION, ANALYSIS_MODE_DISPOSITIONED])
    engine.enable_module('analysis_module_faqueue_alert_analyzer',
                         [ANALYSIS_MODE_CORRELATION, ANALYSIS_MODE_DISPOSITIONED])
    engine.controlled_stop()
    engine.start()
    engine.wait()

    self.assertEquals(log_count('updating crits_id 5c3c9e42ad951d6254d20f98 to status Analyzed'), 1)
def test_submit_timeout_with_alert(self, db, c):
    # any cloudphish submission we make can turn into an alert
    # here we test a cloudphish submission that quickly times out
    # followed by cloudphish alerting on the submission

    # set the timeouts really low
    saq.CONFIG['analysis_module_cloudphish']['frequency'] = '1'
    saq.CONFIG['analysis_module_cloudphish']['query_timeout'] = '1'

    # disable cleanup for analysis mode 'analysis'
    saq.CONFIG['analysis_mode_analysis']['cleanup'] = 'no'

    self.start_api_server()

    root = create_root_analysis(analysis_mode=ANALYSIS_MODE_ANALYSIS)
    root.initialize_storage()
    url = root.add_observable(F_URL, TEST_URL)
    url.add_directive(DIRECTIVE_CRAWL)
    root.save()
    root.schedule()

    engine = TestEngine(analysis_pools={},
                        local_analysis_modes=[ANALYSIS_MODE_ANALYSIS,
                                              ANALYSIS_MODE_CLOUDPHISH,
                                              ANALYSIS_MODE_CORRELATION])
    engine.enable_alerting()
    engine.enable_module('analysis_module_cloudphish', ANALYSIS_MODE_ANALYSIS)
    engine.enable_module('analysis_module_cloudphish_request_analyzer', ANALYSIS_MODE_CLOUDPHISH)
    engine.enable_module('analysis_module_crawlphish', ANALYSIS_MODE_CLOUDPHISH)
    engine.enable_module('analysis_module_forced_detection', ANALYSIS_MODE_CLOUDPHISH)
    engine.enable_module('analysis_module_cloudphish_delayed_test', ANALYSIS_MODE_CLOUDPHISH)
    engine.start()

    # watch for the original analysis to time out
    wait_for_log_count('has timed out', 1, 10)

    # we should see cloudphish eventually complete and alert though
    engine.controlled_stop()
    engine.wait()

    # check the results
    root = RootAnalysis(storage_dir=root.storage_dir)
    root.load()
    url = root.get_observable(url.id)
    self.assertIsNotNone(url)

    # should see an error here
    from saq.modules.cloudphish import CloudphishAnalysis
    cloudphish_analysis = url.get_analysis(CloudphishAnalysis)
    self.assertIsNotNone(cloudphish_analysis)
    self.assertEquals(cloudphish_analysis.result, SCAN_RESULT_ERROR)

    # however we should have an alert generated
    c.execute("SELECT COUNT(*) FROM alerts")
    self.assertEquals(c.fetchone()[0], 1)
def _get_cached_analysis(url, db, c):
    sha256 = hash_url(url)

    # have we already requested and/or processed this URL before?
    c.execute("""SELECT ar.status, ar.result, ar.http_result_code, ar.http_message,
                        HEX(ar.sha256_content), cm.node, cm.name, ar.uuid
                 FROM cloudphish_analysis_results AS ar
                 LEFT JOIN cloudphish_content_metadata AS cm ON ar.sha256_content = cm.sha256_content
                 WHERE sha256_url = UNHEX(%s)""", (sha256,))

    row = c.fetchone()
    if row:
        status, result, http_result, http_message, sha256_content, node, file_name, uuid = row
        if file_name:
            file_name = file_name.decode('unicode_internal')

        storage_dir = storage_dir_from_uuid(uuid)
        root_details = None

        if os.path.exists(storage_dir):
            try:
                root = RootAnalysis(storage_dir=storage_dir_from_uuid(uuid))
                root.load()
                root_details = root.details
            except Exception as e:
                # this isn't really an error -- another process may be in the middle of processing this url
                # the database contents should be correct though
                logging.debug("unable to load cloudphish analysis {}: {}".format(uuid, e))
                #report_exception()

        # keep track of the most popular URLs
        # old URLs get cleaned out
        c.execute("UPDATE cloudphish_url_lookup SET last_lookup = NOW() WHERE sha256_url = UNHEX(%s)",
                  (sha256,))
        db.commit()

        return CloudphishAnalysisResult(RESULT_OK,        # result
                                        root_details,     # details
                                        status=status,
                                        analysis_result=result,
                                        http_result=http_result,
                                        http_message=http_message,
                                        sha256_content=sha256_content,
                                        sha256_url=sha256,
                                        location=node,
                                        file_name=file_name,
                                        uuid=uuid)

    # if we have not then we return None
    return None
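# The helper above returns either a populated CloudphishAnalysisResult or None.
# A minimal caller sketch follows; it is hedged, not part of this module:
# _submit_analysis_request is an assumed name for the cache-miss fallback path,
# and the (db, c) connection/cursor pair is assumed to come from the caller.
def get_or_submit_analysis(url, db, c):
    # consult the cache first
    cached = _get_cached_analysis(url, db, c)
    if cached is not None:
        return cached

    # cache miss: fall through to submitting a new cloudphish analysis request
    return _submit_analysis_request(url, db, c)  # hypothetical helper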
def test_hal9000_alert_no_disposition(self, db, c):
    # same as above except we end up alerting
    root = create_root_analysis(analysis_mode=ANALYSIS_MODE_ANALYSIS)
    root.initialize_storage()
    test_observable = root.add_observable(F_TEST, 'test')
    root.save()
    root.schedule()

    engine = TestEngine(local_analysis_modes=[ANALYSIS_MODE_ANALYSIS, ANALYSIS_MODE_CORRELATION])
    engine.enable_alerting()
    engine.set_cleanup(ANALYSIS_MODE_ANALYSIS, False)
    engine.enable_module('analysis_module_forced_detection', ANALYSIS_MODE_ANALYSIS)
    engine.enable_module('analysis_module_hal9000', [ANALYSIS_MODE_ANALYSIS, ANALYSIS_MODE_CORRELATION])
    engine.controlled_stop()
    engine.start()
    engine.wait()

    root = RootAnalysis(storage_dir=storage_dir_from_uuid(root.uuid))
    root.load()

    # make sure we alerted
    self.assertEquals(root.analysis_mode, ANALYSIS_MODE_CORRELATION)

    test_observable = root.get_observable(test_observable.id)
    self.assertIsNotNone(test_observable)
    analysis = test_observable.get_analysis(HAL9000Analysis)
    self.assertIsNotNone(analysis)

    # total count and mal count should both be 0
    self.assertEquals(analysis.total_count, 0)
    self.assertEquals(analysis.mal_count, 0)

    hal9000_id = _compute_hal9000_md5(test_observable)

    # since we have NOT set a disposition yet we should have nothing in the database about it
    c.execute("SELECT total_count, mal_count FROM observables WHERE id = UNHEX(%s)", (hal9000_id,))
    result = c.fetchone()
    self.assertIsNone(result)

    # verify the correct state is kept
    state = root.state['hal9000']
    self.assertTrue(STATE_KEY_ID_TRACKING in state)
    tracking = state[STATE_KEY_ID_TRACKING]
    self.assertTrue(hal9000_id in tracking)
    tracking_info = tracking[hal9000_id]
    self.assertTrue('id' in tracking_info)
    self.assertEquals(tracking_info['id'], test_observable.id)
    self.assertTrue(KEY_TOTAL_COUNT in tracking_info)
    self.assertTrue(KEY_MAL_COUNT in tracking_info)

    # we have not made any changes yet either
    self.assertIsNone(tracking_info[KEY_TOTAL_COUNT])
    self.assertIsNone(tracking_info[KEY_MAL_COUNT])
def test_submit_double_alert(self, db, c):
    # in this scenario we alert both with the original submission
    # and with the cloudphish submission
    self.start_api_server()

    root = create_root_analysis(analysis_mode=ANALYSIS_MODE_ANALYSIS)
    root.initialize_storage()
    url = root.add_observable(F_URL, TEST_URL)
    url.add_directive(DIRECTIVE_CRAWL)
    root.save()
    root.schedule()

    engine = TestEngine(local_analysis_modes=[ANALYSIS_MODE_ANALYSIS,
                                              ANALYSIS_MODE_CLOUDPHISH,
                                              ANALYSIS_MODE_CORRELATION])
    engine.enable_alerting()
    engine.enable_module('analysis_module_cloudphish', ANALYSIS_MODE_ANALYSIS)
    engine.enable_module('analysis_module_cloudphish_request_analyzer', ANALYSIS_MODE_CLOUDPHISH)
    engine.enable_module('analysis_module_crawlphish', ANALYSIS_MODE_CLOUDPHISH)
    engine.enable_module('analysis_module_forced_detection', ANALYSIS_MODE_CLOUDPHISH)
    engine.start()

    # should see the cloudphish module complete
    wait_for_log_count('analysis CloudphishAnalysis is completed', 1, 10)

    engine.controlled_stop()
    engine.wait()

    # check the results
    root = RootAnalysis(storage_dir=storage_dir_from_uuid(root.uuid))
    root.load()
    url = root.get_observable(url.id)
    self.assertIsNotNone(url)

    # this url should now have 3 analysis objects attached to it
    # (cloudphish, crawlphish and forced detection)
    self.assertEquals(len(url.analysis), 3)

    from saq.modules.cloudphish import CloudphishAnalysis
    cloudphish_analysis = url.get_analysis(CloudphishAnalysis)
    self.assertIsNotNone(cloudphish_analysis)
    self.assertEquals(cloudphish_analysis.analysis_result, SCAN_RESULT_ALERT)

    from saq.modules.url import CrawlphishAnalysisV2
    crawlphish_analysis = url.get_analysis(CrawlphishAnalysisV2)
    self.assertIsNotNone(crawlphish_analysis)

    # there should be two alerts generated in the database
    c.execute("SELECT COUNT(*) FROM alerts")
    self.assertEquals(c.fetchone()[0], 2)

    # the cloudphish alert should have a reference back to the original alert
    self.assertEquals(cloudphish_analysis.context['c'], root.uuid)
def test_hal9000_no_alert(self, db, c):
    root = create_root_analysis(analysis_mode=ANALYSIS_MODE_ANALYSIS)
    root.initialize_storage()
    test_observable = root.add_observable(F_TEST, 'test')
    root.save()
    root.schedule()

    engine = TestEngine(local_analysis_modes=[ANALYSIS_MODE_ANALYSIS])
    engine.set_cleanup(ANALYSIS_MODE_ANALYSIS, False)
    engine.enable_module('analysis_module_hal9000', ANALYSIS_MODE_ANALYSIS)
    engine.controlled_stop()
    engine.start()
    engine.wait()

    root = RootAnalysis(storage_dir=root.storage_dir)
    root.load()

    # make sure we did NOT alert
    self.assertEquals(root.analysis_mode, ANALYSIS_MODE_ANALYSIS)

    test_observable = root.get_observable(test_observable.id)
    self.assertIsNotNone(test_observable)
    analysis = test_observable.get_analysis(HAL9000Analysis)
    self.assertIsNotNone(analysis)

    # total count and mal count should both be 0
    self.assertEquals(analysis.total_count, 0)
    self.assertEquals(analysis.mal_count, 0)

    # we should have a single entry in the database for this observable
    hal9000_id = _compute_hal9000_md5(test_observable)
    c.execute("SELECT total_count, mal_count FROM observables WHERE id = UNHEX(%s)", (hal9000_id,))
    result = c.fetchone()
    self.assertIsNotNone(result)
    self.assertEquals(result[0], 1)
    self.assertEquals(result[1], 0)

    # verify the correct state is kept
    state = root.state['hal9000']
    self.assertTrue(STATE_KEY_ID_TRACKING in state)
    tracking = state[STATE_KEY_ID_TRACKING]
    self.assertTrue(hal9000_id in tracking)
    tracking_info = tracking[hal9000_id]
    self.assertTrue('id' in tracking_info)
    self.assertEquals(tracking_info['id'], test_observable.id)
    self.assertTrue(KEY_TOTAL_COUNT in tracking_info)
    self.assertTrue(KEY_MAL_COUNT in tracking_info)

    # since this doesn't become an alert we don't bother tracking the changes
    self.assertIsNone(tracking_info[KEY_TOTAL_COUNT])
    self.assertIsNone(tracking_info[KEY_MAL_COUNT])
def get_details(uuid, name):
    root = RootAnalysis(storage_dir=storage_dir_from_uuid(uuid))
    root.load()

    # find the analysis with this name
    for analysis in root.all_analysis:
        if analysis.external_details_path == name:
            analysis.load()
            return json_result({'result': analysis.details})

    abort(Response("invalid uuid or invalid details name", 400))
def get_analysis(uuid):
    storage_dir = storage_dir_from_uuid(uuid)
    if saq.CONFIG['engine']['work_dir'] and not os.path.isdir(storage_dir):
        storage_dir = workload_storage_dir(uuid)

    if not os.path.exists(storage_dir):
        abort(Response("invalid uuid {}".format(uuid), 400))

    root = RootAnalysis(storage_dir=storage_dir)
    root.load()
    return json_result({'result': root.json})
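# A hypothetical client-side sketch for the endpoint above. The base URL and
# route are assumptions (the Flask route decorator is not shown here); only the
# response shape -- a JSON object carrying the serialized RootAnalysis under
# the 'result' key -- comes from the code above.
import requests

def fetch_analysis(base_url, uuid):
    # e.g. base_url = 'https://ace-server/api' (illustrative only)
    r = requests.get('{}/get_analysis/{}'.format(base_url, uuid))
    r.raise_for_status()
    return r.json()['result']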
def test_faqueue_alert(self):
    if not saq.CONFIG['sip'].getboolean('enabled'):
        return

    root = create_root_analysis(analysis_mode=ANALYSIS_MODE_CORRELATION, alert_type=ANALYSIS_TYPE_FAQUEUE)
    root.initialize_storage()
    root.details = {'indicator': {'sip_id': '1'}}
    root.save()
    root.schedule()

    engine = TestEngine(local_analysis_modes=[ANALYSIS_MODE_CORRELATION])
    engine.enable_module('analysis_module_faqueue_sip_alert_analyzer', ANALYSIS_MODE_CORRELATION)
    engine.controlled_stop()
    engine.start()
    engine.wait()

    root = RootAnalysis(storage_dir=root.storage_dir)
    root.load()
    ALERT(root)
    set_dispositions([root.uuid], DISPOSITION_FALSE_POSITIVE, UNITTEST_USER_ID)

    engine = TestEngine(local_analysis_modes=[ANALYSIS_MODE_CORRELATION, ANALYSIS_MODE_DISPOSITIONED])
    engine.enable_module('analysis_module_faqueue_sip_alert_analyzer',
                         [ANALYSIS_MODE_CORRELATION, ANALYSIS_MODE_DISPOSITIONED])
    engine.controlled_stop()
    engine.start()
    engine.wait()

    self.assertEquals(log_count('updating sip_id 1 to status Informational'), 1)

    # change the disposition to anything except FALSE POSITIVE and the indicator becomes ANALYZED
    set_dispositions([root.uuid], DISPOSITION_WEAPONIZATION, UNITTEST_USER_ID)

    engine = TestEngine(local_analysis_modes=[ANALYSIS_MODE_CORRELATION, ANALYSIS_MODE_DISPOSITIONED])
    engine.enable_module('analysis_module_faqueue_sip_alert_analyzer',
                         [ANALYSIS_MODE_CORRELATION, ANALYSIS_MODE_DISPOSITIONED])
    engine.controlled_stop()
    engine.start()
    engine.wait()

    self.assertEquals(log_count('updating sip_id 1 to status Analyzed'), 1)
def test_mailbox_submission(self):
    from flask import url_for
    from saq.analysis import _JSONEncoder
    from saq.modules.email import EmailAnalysis

    t = saq.LOCAL_TIMEZONE.localize(datetime.datetime.now()).astimezone(pytz.UTC).strftime(event_time_format_json_tz)

    with open(os.path.join('test_data', 'emails', 'splunk_logging.email.rfc822'), 'rb') as fp:
        result = self.client.post(url_for('analysis.submit'), data={
            'analysis': json.dumps({
                'analysis_mode': 'email',
                'tool': 'unittest',
                'tool_instance': 'unittest_instance',
                'type': 'mailbox',
                'description': 'testing',
                'event_time': t,
                'details': { },
                'observables': [
                    { 'type': F_FILE,
                      'value': 'rfc822.email',
                      'time': t,
                      'tags': [],
                      'directives': [ DIRECTIVE_ORIGINAL_EMAIL ],
                      'limited_analysis': [] },
                ],
                'tags': [ ],
            }, cls=_JSONEncoder),
            'file': (fp, 'rfc822.email'),
        }, content_type='multipart/form-data')

    result = result.get_json()
    self.assertIsNotNone(result)
    self.assertTrue('result' in result)
    result = result['result']
    self.assertIsNotNone(result['uuid'])
    uuid = result['uuid']

    # make sure we don't clean up the analysis so we can check it
    saq.CONFIG['analysis_mode_email']['cleanup'] = 'no'

    engine = TestEngine(local_analysis_modes=['email'])
    engine.enable_module('analysis_module_file_type', 'email')
    engine.enable_module('analysis_module_email_analyzer', 'email')
    engine.enable_module('analysis_module_mailbox_email_analyzer', 'email')
    engine.controlled_stop()
    engine.start()
    engine.wait()

    root = RootAnalysis(storage_dir=storage_dir_from_uuid(uuid))
    root.load()
    observable = root.find_observable(lambda o: o.has_directive(DIRECTIVE_ORIGINAL_EMAIL))
    self.assertIsNotNone(observable)
    analysis = observable.get_analysis(EmailAnalysis)
    self.assertIsNotNone(analysis)

    # these should be the same
    self.assertEquals(analysis.details, root.details)
def test_submit_with_utc_timezone(self):
    # make sure we can submit with a UTC timezone already set
    result = self._submit(event_time=self._get_localized_submit_time())
    self.assertIsNotNone(result)
    self.assertTrue('result' in result)
    result = result['result']
    self.assertIsNotNone(result['uuid'])
    uuid = result['uuid']

    root = RootAnalysis(storage_dir=storage_dir_from_uuid(uuid))
    root.load()
    self.assertEquals(root.event_time, self._get_localized_submit_time())
def test_download(self):
    # first create something to download
    root = create_root_analysis(uuid=str(uuid.uuid4()))
    root.initialize_storage()
    root.details = {'hello': 'world'}
    with open(os.path.join(root.storage_dir, 'test.dat'), 'w') as fp:
        fp.write('test')
    file_observable = root.add_observable(F_FILE, 'test.dat')
    root.save()

    # ask for a download
    result = self.client.get(url_for('engine.download', uuid=root.uuid))

    # we should get back a tar file
    tar_path = os.path.join(saq.TEMP_DIR, 'download.tar')
    output_dir = os.path.join(saq.TEMP_DIR, 'download')

    try:
        with open(tar_path, 'wb') as fp:
            for chunk in result.response:
                fp.write(chunk)

        with tarfile.open(name=tar_path, mode='r|') as tar:
            tar.extractall(path=output_dir)

        root = RootAnalysis(storage_dir=output_dir)
        root.load()

        self.assertTrue('hello' in root.details)
        self.assertEquals('world', root.details['hello'])

        file_observable = root.get_observable(file_observable.id)
        self.assertTrue(os.path.exists(os.path.join(root.storage_dir, file_observable.value)))
        with open(os.path.join(root.storage_dir, file_observable.value), 'r') as fp:
            self.assertEquals(fp.read(), 'test')

    finally:
        try:
            os.remove(tar_path)
        except:
            pass

        try:
            shutil.rmtree(output_dir)
        except:
            pass
def test_submit_forced_download(self):
    # disable cleanup for analysis mode 'analysis'
    saq.CONFIG['analysis_mode_analysis']['cleanup'] = 'no'

    self.start_api_server()

    root = create_root_analysis(analysis_mode=ANALYSIS_MODE_ANALYSIS)
    root.initialize_storage()
    url = root.add_observable(F_URL, TEST_URL)
    url.add_directive(DIRECTIVE_CRAWL)
    url.add_directive(DIRECTIVE_FORCE_DOWNLOAD)
    root.save()
    root.schedule()

    engine = TestEngine(analysis_pools={ANALYSIS_MODE_ANALYSIS: 1, ANALYSIS_MODE_CLOUDPHISH: 1},
                        local_analysis_modes=[ANALYSIS_MODE_ANALYSIS, ANALYSIS_MODE_CLOUDPHISH])
    engine.enable_module('analysis_module_cloudphish', ANALYSIS_MODE_ANALYSIS)
    engine.enable_module('analysis_module_cloudphish_request_analyzer', ANALYSIS_MODE_CLOUDPHISH)
    engine.enable_module('analysis_module_crawlphish', ANALYSIS_MODE_CLOUDPHISH)
    engine.start()

    # should see the cloudphish module complete
    wait_for_log_count('analysis CloudphishAnalysis is completed', 1, 10)

    engine.controlled_stop()
    engine.wait()

    # check the results
    root = RootAnalysis(storage_dir=root.storage_dir)
    root.load()
    url = root.get_observable(url.id)
    self.assertIsNotNone(url)

    # should only have 1 analysis attached to the url
    self.assertEquals(len(url.analysis), 1)

    from saq.modules.cloudphish import CloudphishAnalysis
    cloudphish_analysis = url.get_analysis(CloudphishAnalysis)
    self.assertIsNotNone(cloudphish_analysis)
    self.assertEquals(cloudphish_analysis.analysis_result, SCAN_RESULT_CLEAR)

    # however there should be a file attached
    self.assertEquals(len(cloudphish_analysis.observables), 1)
    self.assertEquals(cloudphish_analysis.observables[0].type, F_FILE)
    self.assertEquals(cloudphish_analysis.observables[0].value, 'Payment_Advice.pdf')
    self.assertTrue(os.path.exists(os.path.join(root.storage_dir, cloudphish_analysis.observables[0].value)))
def test_bro_smtp_stream_submission(self):
    from flask import url_for
    from saq.analysis import _JSONEncoder
    from saq.modules.email import EmailAnalysis, BroSMTPStreamAnalysis

    t = saq.LOCAL_TIMEZONE.localize(datetime.datetime.now()).astimezone(pytz.UTC).strftime(event_time_format_json_tz)

    with open(os.path.join('test_data', 'smtp_streams', 'CBmtfvapmTMqCEUw6'), 'rb') as fp:
        result = self.client.post(url_for('analysis.submit'), data={
            'analysis': json.dumps({
                'analysis_mode': ANALYSIS_MODE_EMAIL,
                'tool': 'unittest',
                'tool_instance': 'unittest_instance',
                'type': ANALYSIS_TYPE_BRO_SMTP,
                'description': 'BRO SMTP Scanner Detection - ',
                'event_time': t,
                'details': { },
                'observables': [
                    { 'type': F_FILE,
                      'value': 'CBmtfvapmTMqCEUw6',
                      'time': t,
                      'tags': [],
                      'directives': [ DIRECTIVE_ORIGINAL_SMTP ],
                      'limited_analysis': [] },
                ],
                'tags': [ ],
            }, cls=_JSONEncoder),
            'file': (fp, 'CBmtfvapmTMqCEUw6'),
        }, content_type='multipart/form-data')

    result = result.get_json()
    self.assertIsNotNone(result)
    self.assertTrue('result' in result)
    result = result['result']
    self.assertIsNotNone(result['uuid'])
    uuid = result['uuid']

    # make sure we don't clean up the analysis so we can check it
    saq.CONFIG['analysis_mode_email']['cleanup'] = 'no'

    engine = TestEngine(local_analysis_modes=[ANALYSIS_MODE_EMAIL])
    engine.enable_module('analysis_module_file_type', 'email')
    engine.enable_module('analysis_module_email_analyzer', 'email')
    engine.enable_module('analysis_module_bro_smtp_analyzer', 'email')
    engine.controlled_stop()
    engine.start()
    engine.wait()

    root = RootAnalysis(storage_dir=storage_dir_from_uuid(uuid))
    root.load()
    observable = root.find_observable(lambda o: o.has_directive(DIRECTIVE_ORIGINAL_SMTP))
    self.assertIsNotNone(observable)
    analysis = observable.get_analysis(BroSMTPStreamAnalysis)
    self.assertIsNotNone(analysis)
def test_request_limit(self):
    # only allow one request
    saq.CONFIG['analysis_module_cloudphish']['cloudphish_request_limit'] = '1'

    # don't clear the analysis
    saq.CONFIG['analysis_mode_analysis']['cleanup'] = 'no'

    self.start_api_server()

    root = create_root_analysis(analysis_mode=ANALYSIS_MODE_ANALYSIS)
    root.initialize_storage()
    url_1 = root.add_observable(F_URL, TEST_URL)
    url_2 = root.add_observable(F_URL, 'http://invalid_domain.local/some/path')
    root.save()
    root.schedule()

    engine = TestEngine(analysis_pools={},
                        local_analysis_modes=[ANALYSIS_MODE_ANALYSIS, ANALYSIS_MODE_CLOUDPHISH])
    engine.enable_module('analysis_module_cloudphish', ANALYSIS_MODE_ANALYSIS)
    engine.enable_module('analysis_module_cloudphish_request_analyzer', ANALYSIS_MODE_CLOUDPHISH)
    engine.enable_module('analysis_module_crawlphish', ANALYSIS_MODE_CLOUDPHISH)
    engine.controlled_stop()
    engine.start()
    engine.wait()

    root = RootAnalysis(storage_dir=root.storage_dir)
    root.load()
    url_1 = root.get_observable(url_1.id)
    url_2 = root.get_observable(url_2.id)

    from saq.modules.cloudphish import CloudphishAnalysis
    analysis_1 = url_1.get_analysis(CloudphishAnalysis)
    analysis_2 = url_2.get_analysis(CloudphishAnalysis)

    # exactly one of the two URLs should have been analyzed; the other request
    # was dropped by the limit (get_analysis returns False in that case)
    self.assertTrue((isinstance(analysis_1, Analysis) and analysis_2 is False)
                    or (analysis_1 is False and isinstance(analysis_2, Analysis)))
    self.assertEquals(log_count('reached cloudphish limit'), 1)
def get_details(uuid, name):
    storage_dir = storage_dir_from_uuid(uuid)
    if saq.CONFIG['engine']['work_dir'] and not os.path.isdir(storage_dir):
        storage_dir = workload_storage_dir(uuid)

    root = RootAnalysis(storage_dir=storage_dir)
    root.load()

    # find the analysis with this name
    for analysis in root.all_analysis:
        if analysis.external_details_path == name:
            #analysis.load()
            return json_result({'result': analysis.details})

    abort(Response("invalid uuid or invalid details name", 400))
def test_download(self):
    root = create_root_analysis(uuid=str(uuid.uuid4()))
    root.initialize_storage()
    root.details = {'hello': 'world'}
    root.save()

    temp_dir = tempfile.mkdtemp(dir=saq.TEMP_DIR)
    try:
        result = ace_api.download(root.uuid, temp_dir)
        self.assertTrue(os.path.exists(os.path.join(temp_dir, 'data.json')))

        root = RootAnalysis(storage_dir=temp_dir)
        root.load()
        self.assertEquals(root.details, {'hello': 'world'})

    finally:
        shutil.rmtree(temp_dir)
def test_upload(self):
    root = create_root_analysis(uuid=str(uuid.uuid4()),
                                storage_dir=os.path.join(saq.TEMP_DIR, 'unittest'))
    root.initialize_storage()
    root.details = {'hello': 'world'}
    root.save()

    result = ace_api.upload(root.uuid, root.storage_dir)
    self.assertTrue(result['result'])

    root = RootAnalysis(storage_dir=storage_dir_from_uuid(root.uuid))
    root.load()
    self.assertEquals(root.details, {'hello': 'world'})
def test_bro_smtp_stream_analysis_no_end_command(self):
    import saq
    import saq.modules.email

    # test the same thing as test_bro_smtp_stream_analysis except the stream is
    # missing the end-of-DATA terminator (the line containing only ".")
    saq.CONFIG['analysis_mode_email']['cleanup'] = 'no'

    root = create_root_analysis(alert_type=ANALYSIS_TYPE_BRO_SMTP, analysis_mode=ANALYSIS_MODE_EMAIL)
    root.initialize_storage()
    root.details = { }
    shutil.copy(os.path.join('test_data', 'smtp_streams', 'CBmtfvapmTMqCEUw6.missing_end'),
                os.path.join(root.storage_dir, 'CBmtfvapmTMqCEUw6'))
    file_observable = root.add_observable(F_FILE, 'CBmtfvapmTMqCEUw6')
    file_observable.add_directive(DIRECTIVE_ORIGINAL_SMTP)
    file_observable.add_directive(DIRECTIVE_NO_SCAN)
    root.save()
    root.schedule()

    engine = TestEngine(local_analysis_modes=[ANALYSIS_MODE_EMAIL])
    engine.enable_module('analysis_module_file_type', 'test_groups')
    engine.enable_module('analysis_module_email_analyzer', 'test_groups')
    engine.enable_module('analysis_module_bro_smtp_analyzer', 'test_groups')
    engine.controlled_stop()
    engine.start()
    engine.wait()

    root = RootAnalysis(storage_dir=root.storage_dir)
    root.load()
    file_observable = root.get_observable(file_observable.id)
    self.assertIsNotNone(file_observable)
    analysis = file_observable.get_analysis(saq.modules.email.BroSMTPStreamAnalysis)
    self.assertIsNotNone(analysis)

    self.assertEquals(len(analysis.get_observables_by_type(F_FILE)), 1)
    self.assertEquals(len(analysis.get_observables_by_type(F_EMAIL_ADDRESS)), 2)
    self.assertEquals(len(analysis.get_observables_by_type(F_IPV4)), 1)
    self.assertEquals(len(analysis.get_observables_by_type(F_EMAIL_CONVERSATION)), 1)

    self.assertTrue(saq.modules.email.KEY_CONNECTION_ID in analysis.details)
    self.assertTrue(saq.modules.email.KEY_SOURCE_IPV4 in analysis.details)
    self.assertTrue(saq.modules.email.KEY_SOURCE_PORT in analysis.details)
    self.assertTrue(saq.modules.email.KEY_ENV_MAIL_FROM in analysis.details)
    self.assertTrue(saq.modules.email.KEY_ENV_RCPT_TO in analysis.details)

    email_file = analysis.find_observable(lambda o: o.type == F_FILE)
    self.assertIsNotNone(email_file)
    self.assertEquals(email_file.value, 'email.rfc822')

    email_analysis = email_file.get_analysis(saq.modules.email.EmailAnalysis)
    self.assertIsNotNone(email_analysis)