def insert_alert(self):
    """Create a fresh root analysis on disk, sync it to the database as an Alert, and return it."""
    root = create_root_analysis(uuid=str(uuid.uuid4()))
    root.initialize_storage()
    root.save()
    # load the saved analysis back as an Alert and insert it into the database
    alert = Alert(storage_dir=root.storage_dir)
    alert.load()
    alert.sync()
    # sync() assigns the database row id on success
    self.assertIsNotNone(alert.id)
    return alert
def test_insert_alert(self):
    """An Alert description longer than 1024 characters is truncated to 1024 on sync.

    Fixes: removed a block of dead commented-out setup code and replaced the
    deprecated ``assertEquals`` alias with ``assertEqual``.
    """
    # make an alert with a description that is too long (one past the column limit)
    root_analysis = create_root_analysis(desc='A' * 1025)
    root_analysis.save()
    alert = Alert(storage_dir=root_analysis.storage_dir)
    alert.load()
    alert.sync()
    # the database description column holds at most 1024 characters
    self.assertEqual(len(alert.description), 1024)
def test_sync_observable_mapping(self):
    """sync_observable_mapping() persists an Observable row for an alert's observable."""
    root_analysis = create_root_analysis()
    root_analysis.save()
    alert = Alert(storage_dir=root_analysis.storage_dir)
    alert.load()
    alert.sync()
    # add an observable and push the mapping to the database
    o1 = alert.add_observable(F_TEST, 'test_1')
    alert.sync_observable_mapping(o1)
    from saq.database import Observable, ObservableMapping
    from sqlalchemy import func
    # the md5 column is stored as binary, so compare against UNHEX of the hex digest
    observable = saq.db.query(Observable).filter(
        Observable.type == o1.type,
        Observable.md5 == func.UNHEX(o1.md5_hex)).first()
    self.assertIsNotNone(observable)
def test_cloudphish_engine_006_http_download_alert(self):
    """Downloading cloudphish alert data yields a gzipped tar that loads as a valid Alert.

    Fixes: the TarFile was never closed (resource leak) — now managed with
    ``with``; replaced the deprecated ``assertEquals`` alias with ``assertEqual``.
    """
    url = 'http://valvoline.com/'
    json_data = self.generate_cloudphish_alert(url)

    # download the alert data
    result = self.client.get('/cloudphish/download_alert?s={}'.format(
        json_data['sha256_content']))
    self.assertEqual(result.status_code, 200)

    # verify the downloaded tar file
    fp = io.BytesIO(result.get_data())

    # extract it into a temporary directory; the context manager guarantees
    # the tarfile is closed even if extraction fails
    temp_dir = tempfile.mkdtemp(dir=saq.test.test_dir)
    with tarfile.open(None, 'r:gz', fp) as t:
        t.extractall(temp_dir)

    # try to load it
    alert = Alert(storage_dir=temp_dir)
    alert.load()
def handle_network_item(self, analysis_path):
    """Ingest a forwarded alert tarball.

    Extracts *analysis_path* into a temporary directory, loads it as an Alert,
    moves the storage into this node's data directory, re-homes the alert to
    this node, syncs it to the database and requests correlation.  The input
    file is removed on success; the temporary directory is always cleaned up.

    Fixes: bare ``raise`` instead of ``raise e`` (preserves the traceback);
    ``dest_dir`` is computed before its try block so the error handler can
    never hit an unbound name; dropped the redundant ``p.wait()`` after
    ``communicate()`` (which already waits for process exit).
    """
    logging.info("got network item {}".format(analysis_path))

    # create a temporary directory to extract the tar file
    temp_dir = tempfile.mkdtemp(suffix='.ace_submission')
    try:
        # extract the tar file inside this temporary directory
        p = Popen(['tar', 'xf', analysis_path, '-C', temp_dir],
                  stdout=PIPE, stderr=PIPE)
        # communicate() waits for the process to terminate
        _stdout, _stderr = p.communicate()

        if p.returncode != 0:
            logging.warning("tar returned non-zero status for {}".format(
                analysis_path))

        if _stderr:
            logging.warning(
                "tar command printed text to stderr for {}: {}".format(
                    analysis_path, _stderr))

        # load the analysis
        root = Alert()
        root.storage_dir = temp_dir

        try:
            root.load()
        except Exception as e:
            logging.error("unable to load from {}: {}".format(
                analysis_path, e))
            report_exception()
            return

        # move the storage_dir into ACE
        # compute the destination first so it is always defined in the handler below
        dest_dir = os.path.join(saq.CONFIG['global']['data_dir'],
                                saq.SAQ_NODE, root.uuid[0:3], root.uuid)
        try:
            shutil.move(root.storage_dir, dest_dir)
        except Exception as e:
            logging.error("unable to move {} to {}: {}".format(
                root.storage_dir, dest_dir, e))
            report_exception()
            return

        # change the location of the alert to this receiving system
        root.location = saq.SAQ_NODE

        # insert the alert into the database
        root.storage_dir = dest_dir

        # a forwarded alert may still carry the sending system's database id; clear it
        if root.id:
            logging.debug(
                "removed previous id {} from forwarded alert {}".format(
                    root.id, root))
            root.id = None

        try:
            root.sync()
            root.request_correlation()
        except Exception as e:
            logging.error("unable to save alert from {}: {}".format(
                analysis_path, e))
            report_exception()
            return

        # if we got to this point then we're done with this input file
        try:
            os.remove(analysis_path)
        except Exception as e:
            logging.error("unable to remove {}: {}".format(
                analysis_path, e))
            report_exception()

    except Exception as e:
        logging.error("unable to process {}: {}".format(analysis_path, e))
        report_exception()
        # re-raise the current exception, preserving the original traceback
        raise
    finally:
        try:
            # on success temp_dir was moved away, so check before removing
            if os.path.exists(temp_dir):
                shutil.rmtree(temp_dir)
        except Exception as e:
            logging.error("unable to delete temporary directory {}: {}".format(
                temp_dir, e))
            report_exception()
def test_ace_engine_002_persistent_engine(self):
    """A pending delayed analysis request is persisted at engine shutdown and completed after restart."""
    engine = CustomACEEngine()
    # start from a clean slate: no previously persisted delayed analysis
    if os.path.exists(engine.delayed_analysis_path):
        os.remove(engine.delayed_analysis_path)
    engine.enable_module('analysis_module_test_delayed_analysis')
    self.start_engine(engine)
    root = create_root_analysis(uuid=str(uuid.uuid4()))
    root.initialize_storage()
    # observable value encodes the test module's delay parameters — TODO confirm format
    o_uuid = root.add_observable(F_TEST, '0:05|0:10').id
    root.save()
    alert = Alert()
    alert.storage_dir = root.storage_dir
    alert.load()
    alert.sync()
    alert.request_correlation()

    # wait for the engine to mark the analysis as delayed
    def callback():
        return os.path.exists(os.path.join(root.storage_dir, '.delayed'))
    self.assertTrue(self.wait_for_condition(callback))
    # killing the engine should persist the outstanding request to disk
    self.kill_engine(engine)
    self.assertTrue(os.path.exists(engine.delayed_analysis_path))
    with open(engine.delayed_analysis_path, 'rb') as fp:
        delayed_analysis = pickle.load(fp)
    # exactly one delayed request is expected
    if len(delayed_analysis) > 1:
        for item in delayed_analysis:
            print(item[1])
        self.fail("more than one delayed analysis request is available")
    next_time, dar = delayed_analysis[0]  # dar == delayed_analysis_request
    from saq.engine import DelayedAnalysisRequest
    # the persisted request must reference the original alert and module
    self.assertIsInstance(dar, DelayedAnalysisRequest)
    self.assertEquals(dar.storage_dir, root.storage_dir)
    self.assertEquals(dar.target_type, type(alert))
    self.assertEquals(dar.observable_uuid, o_uuid)
    self.assertEquals(dar.analysis_module, 'analysis_module_test_delayed_analysis')
    self.assertEquals(dar.uuid, root.uuid)
    self.assertFalse(dar.lock_proxy.is_locked())
    from saq.modules.test import DelayedAnalysisTestAnalysis
    # reload the analysis from disk: only the initial request should have run
    root = create_root_analysis(storage_dir=root.storage_dir)
    root.load()
    analysis = root.get_observable(o_uuid).get_analysis(
        DelayedAnalysisTestAnalysis)
    self.assertTrue(analysis.initial_request)
    self.assertFalse(analysis.delayed_request)
    self.assertEquals(analysis.request_count, 1)
    self.assertFalse(analysis.completed)
    # restart the engine; it should pick up the persisted delayed request
    engine = CustomACEEngine()
    engine.enable_module('analysis_module_test_delayed_analysis')
    self.start_engine(engine)
    engine.queue_work_item(TerminatingMarker())
    self.wait_engine(engine)
    # after restart the delayed request must have run to completion
    root = create_root_analysis(storage_dir=root.storage_dir)
    root.load()
    analysis = root.get_observable(o_uuid).get_analysis(
        DelayedAnalysisTestAnalysis)
    self.assertTrue(analysis.initial_request)
    self.assertTrue(analysis.delayed_request)
    self.assertEquals(analysis.request_count, 2)
    self.assertTrue(analysis.completed)
    # the persistence file is consumed once the work is finished
    self.assertFalse(os.path.exists(engine.delayed_analysis_path))
def test_ace_engine_003_persistent_engine_multiple(self):
    """Multiple delayed analysis requests are saved at shutdown and reloaded at startup."""
    engine = CustomACEEngine()
    # start from a clean slate: no previously persisted delayed analysis
    if os.path.exists(engine.delayed_analysis_path):
        os.remove(engine.delayed_analysis_path)
    tracking = {}  # key = storage_dir, value = observable uuid
    engine.enable_module('analysis_module_test_delayed_analysis')
    self.start_engine(engine)
    # submit three separate alerts, each triggering a delayed analysis
    for _ in range(3):
        root = create_root_analysis(uuid=str(uuid.uuid4()))
        root.initialize_storage()
        tracking[root.storage_dir] = root.add_observable(
            F_TEST, '0:10|0:15').id
        root.save()
        alert = Alert()
        alert.storage_dir = root.storage_dir
        alert.load()
        alert.sync()
        alert.request_correlation()

    # wait until the last-submitted alert is marked as delayed
    def callback():
        return os.path.exists(
            os.path.join(root.storage_dir, '.delayed'))
    self.assertTrue(self.wait_for_condition(callback))
    # killing the engine should persist all outstanding requests to disk
    self.kill_engine(engine)
    self.assertTrue(os.path.exists(engine.delayed_analysis_path))
    with open(engine.delayed_analysis_path, 'rb') as fp:
        delayed_analysis = pickle.load(fp)
    self.assertEquals(len(delayed_analysis), 3)
    from saq.modules.test import DelayedAnalysisTestAnalysis
    # before restart: each alert saw only its initial (non-delayed) request
    for storage_dir in tracking.keys():
        root = create_root_analysis(storage_dir=storage_dir)
        root.load()
        analysis = root.get_observable(tracking[storage_dir]).get_analysis(
            DelayedAnalysisTestAnalysis)
        self.assertTrue(analysis.initial_request)
        self.assertFalse(analysis.delayed_request)
        self.assertEquals(analysis.request_count, 1)
        self.assertFalse(analysis.completed)
    # restart the engine; it should pick up all persisted delayed requests
    engine = CustomACEEngine()
    engine.enable_module('analysis_module_test_delayed_analysis')
    self.start_engine(engine)
    engine.queue_work_item(TerminatingMarker())
    self.wait_engine(engine)
    # after restart: every delayed request must have run to completion
    for storage_dir in tracking.keys():
        root = create_root_analysis(storage_dir=storage_dir)
        root.load()
        analysis = root.get_observable(tracking[storage_dir]).get_analysis(
            DelayedAnalysisTestAnalysis)
        self.assertTrue(analysis.initial_request)
        self.assertTrue(analysis.delayed_request)
        self.assertEquals(analysis.request_count, 2)
        self.assertTrue(analysis.completed)
    # the persistence file is consumed once all work is finished
    self.assertFalse(os.path.exists(engine.delayed_analysis_path))