class AnalysisSchedulerTest(TestCase):
    """Integration harness for AnalysisScheduler backed by an in-memory database mock."""

    def setUp(self):
        # Route every ConnectTo context-manager use to the database mock.
        self.mocked_interface = DatabaseMock()
        self.enter_patch = mock.patch(target='helperFunctions.web_interface.ConnectTo.__enter__', new=lambda _: self.mocked_interface)
        self.enter_patch.start()
        self.exit_patch = mock.patch(target='helperFunctions.web_interface.ConnectTo.__exit__', new=fake_exit)
        self.exit_patch.start()

        config = get_config_for_testing()
        config.add_section('ip_and_uri_finder')
        config.set('ip_and_uri_finder', 'signature_directory', 'analysis/signatures/ip_and_uri_finder/')
        # FIX: the 'default_plugins' section was never created before config.set(),
        # which raises configparser.NoSectionError unless the testing config already
        # defines it. The sibling TestScheduleInitialAnalysis adds it explicitly.
        if not config.has_section('default_plugins'):
            config.add_section('default_plugins')
        # NOTE(review): the sibling test uses the option name 'plugins' here, not
        # 'default' — confirm which key AnalysisScheduler actually reads.
        config.set('default_plugins', 'default', 'file_hashes')

        self.tmp_queue = Queue()
        self.sched = AnalysisScheduler(config=config, pre_analysis=lambda *_: None, post_analysis=self.dummy_callback, db_interface=self.mocked_interface)

    def tearDown(self):
        self.sched.shutdown()
        self.tmp_queue.close()
        self.enter_patch.stop()
        self.exit_patch.stop()
        self.mocked_interface.shutdown()
        gc.collect()  # reclaim multiprocessing resources between tests

    def dummy_callback(self, fw):
        # Finished objects are pushed to a queue so tests can block on results.
        self.tmp_queue.put(fw)
class TestTagPropagation(unittest.TestCase):
    """End-to-end check that analysis tags set on an inner file propagate to the firmware root."""

    def setUp(self):
        self._tmp_dir = TemporaryDirectory()
        self._config = initialize_config(self._tmp_dir)
        self.analysis_finished_event = Event()
        self.uid_of_key_file = '530bf2f1203b789bfe054d3118ebd29a04013c587efd22235b3b9677cee21c0e_2048'
        self._mongo_server = MongoMgr(config=self._config, auth=False)
        self.backend_interface = BackEndDbInterface(config=self._config)
        self._analysis_scheduler = AnalysisScheduler(config=self._config, post_analysis=self.count_analysis_finished_event)
        self._tagging_scheduler = TaggingDaemon(analysis_scheduler=self._analysis_scheduler)
        self._unpack_scheduler = UnpackingScheduler(config=self._config, post_unpack=self._analysis_scheduler.add_task)

    def count_analysis_finished_event(self, fw_object):
        # Persist every finished object; signal once the key file under test is done.
        self.backend_interface.add_object(fw_object)
        if fw_object.uid == self.uid_of_key_file:
            self.analysis_finished_event.set()

    def _wait_for_empty_tag_queue(self):
        # Poll until the tagging daemon has drained all pending tag updates.
        while not self._analysis_scheduler.tag_queue.empty():
            sleep(0.1)

    def tearDown(self):
        # Shut down in reverse dependency order before wiping the test database.
        self._unpack_scheduler.shutdown()
        self._tagging_scheduler.shutdown()
        self._analysis_scheduler.shutdown()
        clean_test_database(self._config, get_database_names(self._config))
        self._mongo_server.shutdown()
        self._tmp_dir.cleanup()
        gc.collect()

    def test_run_analysis_with_tag(self):
        firmware = Firmware(file_path='{}/container/with_key.7z'.format(get_test_data_dir()))
        firmware.release_date = '2017-01-01'
        firmware.scheduled_analysis = ['crypto_material']

        self._unpack_scheduler.add_task(firmware)
        assert self.analysis_finished_event.wait(timeout=20)

        key_file = self.backend_interface.get_object(self.uid_of_key_file, analysis_filter=['crypto_material'])
        assert key_file.processed_analysis['crypto_material']['tags'], 'no tags set in analysis'

        self._wait_for_empty_tag_queue()

        root_object = self.backend_interface.get_object(firmware.uid, analysis_filter=['crypto_material'])
        assert root_object.analysis_tags, 'tags not propagated properly'
        assert root_object.analysis_tags['crypto_material']['private_key_inside']
def test_scheduler(test_config, finished_event, intermediate_event):
    """Fixture wiring unpacker -> analyzer -> intercom for scheduler integration tests.

    Sets *intermediate_event* once 8 objects finished pre-analysis and
    *finished_event* once 16 did, yields the unpacking scheduler, then tears
    everything down in reverse order.
    """
    interface = BackEndDbInterface(config=test_config)
    elements_finished = Value('i', 0)

    def count_pre_analysis(file_object):
        interface.add_object(file_object)
        # FIX: `Value.value += 1` is not atomic and this callback may run
        # concurrently, so guard the update with the Value's lock; use >= so a
        # lost exact count can never leave an event permanently unset.
        with elements_finished.get_lock():
            elements_finished.value += 1
            count = elements_finished.value
        if count >= 16:
            finished_event.set()
        elif count >= 8:
            intermediate_event.set()

    analyzer = AnalysisScheduler(test_config, pre_analysis=count_pre_analysis, db_interface=interface)
    unpacker = UnpackingScheduler(config=test_config, post_unpack=analyzer.start_analysis_of_object)
    intercom = InterComBackEndBinding(config=test_config, analysis_service=analyzer, unpacking_service=unpacker, compare_service=MockScheduler())
    yield unpacker
    intercom.shutdown()
    unpacker.shutdown()
    analyzer.shutdown()
class FactBackend(FactBase):
    """Backend component of FACT: owns the analysis, tagging, unpacking and
    compare schedulers plus the intercom binding, and runs the main loop."""

    PROGRAM_NAME = 'FACT Backend'
    PROGRAM_DESCRIPTION = 'Firmware Analysis and Compare Tool (FACT) Backend'
    COMPONENT = 'backend'

    def __init__(self):
        super().__init__()
        try:
            self.analysis_service = AnalysisScheduler(config=self.config)
        except PluginInitException as error:
            # A broken plugin makes the backend unusable, so abort startup entirely.
            logging.critical(
                f'Error during initialization of plugin {error.plugin.NAME}. Shutting down FACT backend'
            )
            complete_shutdown()
        # The remaining services depend on the analysis scheduler, so they are
        # only constructed once it initialized successfully.
        self.tagging_service = TaggingDaemon(analysis_scheduler=self.analysis_service)
        self.unpacking_service = UnpackingScheduler(
            config=self.config,
            post_unpack=self.analysis_service.start_analysis_of_object,
            analysis_workload=self.analysis_service.get_scheduled_workload)
        self.compare_service = CompareScheduler(config=self.config)
        self.intercom = InterComBackEndBinding(
            config=self.config,
            analysis_service=self.analysis_service,
            compare_service=self.compare_service,
            unpacking_service=self.unpacking_service)

    def main(self):
        # Main loop: publish workload statistics every 5 s until a service
        # reports an exception or shutdown is requested.
        while self.run:
            self.work_load_stat.update(
                unpacking_workload=self.unpacking_service.get_scheduled_workload(),
                analysis_workload=self.analysis_service.get_scheduled_workload())
            if self._exception_occurred():
                break
            sleep(5)
            if self.args.testing:
                break  # run a single iteration when started in testing mode
        self.shutdown()

    def shutdown(self):
        # Stop the intercom first so no new work arrives, then the schedulers.
        super().shutdown()
        self.intercom.shutdown()
        self.compare_service.shutdown()
        self.unpacking_service.shutdown()
        self.tagging_service.shutdown()
        self.analysis_service.shutdown()
        if not self.args.testing:
            complete_shutdown()

    def _exception_occurred(self):
        # True if any of the scheduler services recorded a worker exception.
        return any((self.unpacking_service.check_exceptions(),
                    self.compare_service.check_exceptions(),
                    self.analysis_service.check_exceptions()))
def test_scheduler(test_config):
    """Fixture: build the unpack -> analysis pipeline plus intercom, yield the unpacker, then tear down."""
    analysis_service = AnalysisScheduler(test_config)
    unpacking_service = UnpackingScheduler(config=test_config, post_unpack=analysis_service.add_task)
    intercom = InterComBackEndBinding(
        config=test_config,
        analysis_service=analysis_service,
        unpacking_service=unpacking_service,
        compare_service=MockScheduler(),
    )
    yield unpacking_service
    # tear-down: stop the intercom before the services it binds together
    intercom.shutdown()
    unpacking_service.shutdown()
    analysis_service.shutdown()
class TestFileAddition(unittest.TestCase):
    """Integration test: unpack a container and run the mandatory analyses on every extracted file."""

    @patch('unpacker.unpack.FS_Organizer', MockFSOrganizer)
    def setUp(self):
        self.mocked_interface = DatabaseMock()
        # Patch ConnectTo so every database access is served by the mock.
        self.enter_patch = unittest.mock.patch(target='helperFunctions.web_interface.ConnectTo.__enter__', new=lambda _: self.mocked_interface)
        self.exit_patch = unittest.mock.patch(target='helperFunctions.web_interface.ConnectTo.__exit__', new=fake_exit)
        self.enter_patch.start()
        self.exit_patch.start()

        self._config = initialize_config(None)
        self._tmp_queue = Queue()
        self._analysis_scheduler = AnalysisScheduler(
            config=self._config,
            pre_analysis=lambda *_: None,
            post_analysis=self._dummy_callback,
            db_interface=MockDbInterface(None))
        self._unpack_scheduler = UnpackingScheduler(
            config=self._config,
            post_unpack=self._analysis_scheduler.start_analysis_of_object,
            db_interface=self.mocked_interface)

    def tearDown(self):
        self._unpack_scheduler.shutdown()
        self._analysis_scheduler.shutdown()
        self._tmp_queue.close()
        self.enter_patch.stop()
        self.exit_patch.stop()
        self.mocked_interface.shutdown()
        gc.collect()

    def test_unpack_and_analyse(self):
        test_fw = Firmware(file_path='{}/container/test.zip'.format(get_test_data_dir()))
        self._unpack_scheduler.add_task(test_fw)
        # container with 3 included files times 2 mandatory plugins run
        for _ in range(4 * 2):
            processed_container = self._tmp_queue.get(timeout=10)
        self.assertGreaterEqual(len(processed_container.processed_analysis), 3, 'at least one analysis not done')

    def _dummy_callback(self, fw):
        self._tmp_queue.put(fw)
signal.signal(signal.SIGINT, lambda *_: None) os.setpgid(os.getpid(), os.getpid()) # reset pgid to self so that "complete_shutdown" doesn't run amok else: signal.signal(signal.SIGINT, shutdown) args, config = program_setup(PROGRAM_NAME, PROGRAM_DESCRIPTION) analysis_service = AnalysisScheduler(config=config) tagging_service = TaggingDaemon(analysis_scheduler=analysis_service) unpacking_service = UnpackingScheduler(config=config, post_unpack=analysis_service.start_analysis_of_object, analysis_workload=analysis_service.get_scheduled_workload) compare_service = CompareScheduler(config=config) intercom = InterComBackEndBinding(config=config, analysis_service=analysis_service, compare_service=compare_service, unpacking_service=unpacking_service) work_load_stat = WorkLoadStatistic(config=config) run = True while run: work_load_stat.update(unpacking_workload=unpacking_service.get_scheduled_workload(), analysis_workload=analysis_service.get_scheduled_workload()) if any((unpacking_service.check_exceptions(), compare_service.check_exceptions(), analysis_service.check_exceptions())): break sleep(5) if args.testing: break logging.info('shutdown components') work_load_stat.shutdown() intercom.shutdown() compare_service.shutdown() unpacking_service.shutdown() tagging_service.shutdown() analysis_service.shutdown() if not args.testing: complete_shutdown()
class TestFileAddition(unittest.TestCase):
    """End-to-end test: unpack two firmware images, analyse all files, then compare them."""

    @patch('unpacker.unpack.FS_Organizer', MockFSOrganizer)
    def setUp(self):
        self._tmp_dir = TemporaryDirectory()
        self._config = initialize_config(self._tmp_dir)
        self.elements_finished_analyzing = Value('i', 0)
        self.analysis_finished_event = Event()
        self.compare_finished_event = Event()
        self._mongo_server = MongoMgr(config=self._config, auth=False)
        self.backend_interface = BackEndDbInterface(config=self._config)
        self._analysis_scheduler = AnalysisScheduler(
            config=self._config, post_analysis=self.count_analysis_finished_event)
        self._unpack_scheduler = UnpackingScheduler(
            config=self._config, post_unpack=self._analysis_scheduler.add_task)
        self._compare_scheduler = CompareScheduler(
            config=self._config, callback=self.trigger_compare_finished_event)

    def count_analysis_finished_event(self, fw_object):
        self.backend_interface.add_analysis(fw_object)
        # FIX: increment under the Value's lock -- `+=` on a multiprocessing.Value
        # is not atomic and this callback may run concurrently, so a lost update
        # could make the counter never reach the exact target.
        with self.elements_finished_analyzing.get_lock():
            self.elements_finished_analyzing.value += 1
            count = self.elements_finished_analyzing.value
        if count == 4 * 2 * 2:  # 2 container with 3 files each and 2 plugins
            self.analysis_finished_event.set()

    def trigger_compare_finished_event(self):
        self.compare_finished_event.set()

    def tearDown(self):
        self._compare_scheduler.shutdown()
        self._unpack_scheduler.shutdown()
        self._analysis_scheduler.shutdown()
        clean_test_database(self._config, get_database_names(self._config))
        self._mongo_server.shutdown()
        self._tmp_dir.cleanup()
        gc.collect()

    def test_unpack_analyse_and_compare(self):
        test_fw_1 = Firmware(file_path='{}/container/test.zip'.format(get_test_data_dir()))
        test_fw_1.release_date = '2017-01-01'
        test_fw_2 = Firmware(file_path='{}/regression_one'.format(get_test_data_dir()))
        test_fw_2.release_date = '2017-01-01'
        self._unpack_scheduler.add_task(test_fw_1)
        self._unpack_scheduler.add_task(test_fw_2)
        # FIX: assert on the wait() result -- Event.wait returns False on timeout,
        # and the return value was previously discarded, so the test silently
        # continued with incomplete analysis data.
        self.assertTrue(self.analysis_finished_event.wait(timeout=20), 'analysis not finished in time')
        compare_id = normalize_compare_id(';'.join([fw.uid for fw in [test_fw_1, test_fw_2]]))
        self.assertIsNone(self._compare_scheduler.add_task((compare_id, False)), 'adding compare task creates error')
        self.assertTrue(self.compare_finished_event.wait(timeout=10), 'compare not finished in time')
        with ConnectTo(CompareDbInterface, self._config) as sc:
            result = sc.get_compare_result(compare_id)
        self.assertEqual(result['plugins']['Software'], self._expected_result()['Software'])
        self.assertCountEqual(
            result['plugins']['File_Coverage']['files_in_common'],
            self._expected_result()['File_Coverage']['files_in_common'])

    @staticmethod
    def _expected_result():
        return {
            'File_Coverage': {
                'files_in_common': {
                    'all': [],
                    'collapse': False
                }
            },
            'Software': {
                'Compare Skipped': {
                    'all': 'Required analysis not present: [\'software_components\', \'software_components\']'
                }
            }
        }
class TestFileAddition(unittest.TestCase):
    """End-to-end test: unpack two containers, analyse all files, then compare the two firmwares."""

    @patch('unpacker.unpack.FS_Organizer', MockFSOrganizer)
    def setUp(self):
        self._tmp_dir = TemporaryDirectory()
        self._config = initialize_config(self._tmp_dir)
        self.elements_finished_analyzing = Value('i', 0)
        self.analysis_finished_event = Event()
        self.compare_finished_event = Event()
        self._mongo_server = MongoMgr(config=self._config, auth=False)
        self.backend_interface = BackEndDbInterface(config=self._config)
        self._analysis_scheduler = AnalysisScheduler(config=self._config, post_analysis=self.count_analysis_finished_event)
        self._unpack_scheduler = UnpackingScheduler(config=self._config, post_unpack=self._analysis_scheduler.add_task)
        self._compare_scheduler = CompareScheduler(config=self._config, callback=self.trigger_compare_finished_event)

    def count_analysis_finished_event(self, fw_object):
        self.backend_interface.add_object(fw_object)
        # FIX: increment under the Value's lock -- `+=` on a multiprocessing.Value
        # is not atomic, so concurrent callbacks could lose updates and keep the
        # counter below the threshold forever.
        with self.elements_finished_analyzing.get_lock():
            self.elements_finished_analyzing.value += 1
            count = self.elements_finished_analyzing.value
        if count > 7:
            self.analysis_finished_event.set()

    def trigger_compare_finished_event(self):
        self.compare_finished_event.set()

    def tearDown(self):
        self._compare_scheduler.shutdown()
        self._unpack_scheduler.shutdown()
        self._analysis_scheduler.shutdown()
        clean_test_database(self._config, get_database_names(self._config))
        self._mongo_server.shutdown()
        self._tmp_dir.cleanup()
        gc.collect()

    def test_unpack_analyse_and_compare(self):
        test_fw_1 = Firmware(file_path='{}/container/test.zip'.format(get_test_data_dir()))
        test_fw_1.release_date = '2017-01-01'
        test_fw_2 = Firmware(file_path='{}/container/test.7z'.format(get_test_data_dir()))
        test_fw_2.release_date = '2017-01-01'
        self._unpack_scheduler.add_task(test_fw_1)
        self._unpack_scheduler.add_task(test_fw_2)
        # FIX: assert on the wait() result -- Event.wait returns False on timeout,
        # and the return value was previously discarded, so a timeout went unnoticed.
        self.assertTrue(self.analysis_finished_event.wait(timeout=10), 'analysis not finished in time')
        compare_id = unify_string_list(';'.join([fw.uid for fw in [test_fw_1, test_fw_2]]))
        self.assertIsNone(self._compare_scheduler.add_task((compare_id, False)), 'adding compare task creates error')
        self.assertTrue(self.compare_finished_event.wait(timeout=10), 'compare not finished in time')
        with ConnectTo(CompareDbInterface, self._config) as sc:
            result = sc.get_compare_result(compare_id)
        self.assertFalse(isinstance(result, str), 'compare result should exist')
        self.assertEqual(result['plugins']['Software'], self._expected_result()['Software'])
        self.assertCountEqual(
            result['plugins']['File_Coverage']['exclusive_files'],
            self._expected_result()['File_Coverage']['exclusive_files'])

    @staticmethod
    def _expected_result():
        return {
            'File_Coverage': {
                'exclusive_files': {
                    '418a54d78550e8584291c96e5d6168133621f352bfc1d43cf84e81187fef4962_787': [],
                    'd38970f8c5153d1041810d0908292bc8df21e7fd88aab211a8fb96c54afe6b01_319': [],
                    'collapse': False
                },
                'files_in_common': {
                    'all': [
                        'faa11db49f32a90b51dfc3f0254f9fd7a7b46d0b570abd47e1943b86d554447a_28',
                        '289b5a050a83837f192d7129e4c4e02570b94b4924e50159fad5ed1067cfbfeb_20',
                        'd558c9339cb967341d701e3184f863d3928973fccdc1d96042583730b5c7b76a_62'
                    ],
                    'collapse': False
                },
                'similar_files': {}
            },
            'Software': {
                'Compare Skipped': {
                    'all': 'Required analysis not present: [\'software_components\', \'software_components\']'
                }
            }
        }
class TestScheduleInitialAnalysis(unittest.TestCase):
    """Tests for initial analysis scheduling, plugin registration, and the plugin dict."""

    def setUp(self):
        # Route all ConnectTo context-manager usage to the database mock.
        self.mocked_interface = DatabaseMock()
        self.enter_patch = unittest.mock.patch(target='helperFunctions.web_interface.ConnectTo.__enter__', new=lambda _: self.mocked_interface)
        self.enter_patch.start()
        self.exit_patch = unittest.mock.patch(target='helperFunctions.web_interface.ConnectTo.__exit__', new=fake_exit)
        self.exit_patch.start()
        config = get_config_for_testing()
        config.add_section('ip_and_uri_finder')
        config.set('ip_and_uri_finder', 'signature_directory', 'analysis/signatures/ip_and_uri_finder/')
        config.add_section('default_plugins')
        config.set('default_plugins', 'plugins', 'file_hashes')
        self.tmp_queue = Queue()
        self.sched = AnalysisScheduler(config=config, post_analysis=self.dummy_callback, db_interface=DatabaseMock())

    def tearDown(self):
        self.sched.shutdown()
        self.tmp_queue.close()
        self.enter_patch.stop()
        self.exit_patch.stop()
        self.mocked_interface.shutdown()
        gc.collect()

    def test_plugin_registration(self):
        self.assertIn('dummy_plugin_for_testing_only', self.sched.analysis_plugins, 'Dummy plugin not found')

    def test_schedule_firmware_init_no_analysis_selected(self):
        # Stop the workers and swap in a fresh queue so the scheduled task can
        # be inspected instead of being consumed by an analysis process.
        self.sched.shutdown()
        self.sched.process_queue = Queue()
        test_fw = Firmware(binary=b'test')
        self.sched.add_task(test_fw)
        test_fw = self.sched.process_queue.get(timeout=5)
        self.assertEqual(len(test_fw.scheduled_analysis), len(MANDATORY_PLUGINS), 'Mandatory Plugins not selected')
        for item in MANDATORY_PLUGINS:
            self.assertIn(item, test_fw.scheduled_analysis)

    def test_whole_run_analyis_selected(self):
        test_fw = Firmware(file_path=os.path.join(get_test_data_dir(), 'get_files_test/testfile1'))
        test_fw.scheduled_analysis = ['dummy_plugin_for_testing_only']
        self.sched.add_task(test_fw)
        test_fw = self.tmp_queue.get(timeout=10)
        self.assertEqual(len(test_fw.processed_analysis), 3, 'analysis not done')
        self.assertEqual(test_fw.processed_analysis['dummy_plugin_for_testing_only']['1'], 'first result', 'result not correct')
        self.assertEqual(test_fw.processed_analysis['dummy_plugin_for_testing_only']['summary'], ['first result', 'second result'])
        self.assertIn('file_hashes', test_fw.processed_analysis.keys(), 'Mandatory plug-in not executed')
        self.assertIn('file_type', test_fw.processed_analysis.keys(), 'Mandatory plug-in not executed')

    def test_get_plugin_dict(self):
        result = self.sched.get_plugin_dict()
        self.assertIn('file_hashes', result.keys(), 'file hashes plugin not found')
        self.assertTrue(result['file_hashes'][1], 'mandatory flag not set')
        self.assertTrue(result['file_hashes'][2], 'default flag not set')
        self.assertIn('file_type', result.keys(), 'file type plugin not found')
        self.assertFalse(result['file_type'][2], 'default flag set but should not')
        self.assertEqual(result['file_type'][0], self.sched.analysis_plugins['file_type'].DESCRIPTION, 'description not correct')
        self.assertTrue(result['unpacker'][1], 'unpacker plugin not marked as mandatory')
        # FIX: the plugin name was misspelled ('dummy_plug_in_for_testing_only'),
        # which made this assertion vacuously true; use the real plugin name so
        # the check actually verifies the dummy plugin was removed from the dict.
        self.assertNotIn('dummy_plugin_for_testing_only', result.keys(), 'dummy plug-in not removed')

    def dummy_callback(self, fw):
        # Finished objects are pushed to a queue so tests can block on results.
        self.tmp_queue.put(fw)