def test_scheduler(test_config, finished_event, intermediate_event):
    """Integration fixture: full backend pipeline that signals progress events.

    Stores each pre-analyzed object in the DB and counts it; sets
    ``intermediate_event`` after 8 objects and ``finished_event`` after 16.
    Yields the unpacking scheduler, then shuts all components down.
    """
    interface = BackEndDbInterface(config=test_config)
    elements_finished = Value('i', 0)

    def count_pre_analysis(file_object):
        interface.add_object(file_object)
        # The callback may run concurrently in several scheduler processes and
        # ``Value.value += 1`` is NOT atomic -- without the lock an increment can
        # be lost and the 8/16 thresholds skipped, so the events never fire.
        with elements_finished.get_lock():
            elements_finished.value += 1
            if elements_finished.value == 16:
                finished_event.set()
            elif elements_finished.value == 8:
                intermediate_event.set()

    analyzer = AnalysisScheduler(test_config, pre_analysis=count_pre_analysis, db_interface=interface)
    unpacker = UnpackingScheduler(config=test_config, post_unpack=analyzer.start_analysis_of_object)
    intercom = InterComBackEndBinding(
        config=test_config, analysis_service=analyzer, unpacking_service=unpacker, compare_service=MockScheduler()
    )
    yield unpacker
    # tear down in reverse start order
    intercom.shutdown()
    unpacker.shutdown()
    analyzer.shutdown()
def test_rest_download_valid(self):
    """Store a firmware on disk and in the DB, fetch its binary via REST, check payload fields."""
    backend_binding = InterComBackEndBinding(
        config=self.config,
        analysis_service=test_backend_scheduler.AnalysisServiceMock(),
        compare_service=test_backend_scheduler.ServiceMock(self.test_queue),
        unpacking_service=test_backend_scheduler.ServiceMock(self.test_queue),
    )
    test_firmware = create_test_firmware(device_class='test class', device_name='test device', vendor='test vendor')
    store_binary_on_file_system(self.tmp_dir.name, test_firmware)
    self.db_interface.add_firmware(test_firmware)

    try:
        url = '/rest/binary/{}'.format(test_firmware.uid)
        rv = self.test_client.get(url, follow_redirects=True)
    finally:
        # the backend binding must go down even if the request blows up
        backend_binding.shutdown()

    expected_fragments = [
        standard_b64encode(test_firmware.binary),
        '"file_name": "{}"'.format(test_firmware.file_name).encode(),
        '"SHA256": "{}"'.format(test_firmware.sha256).encode(),
    ]
    for fragment in expected_fragments:
        assert fragment in rv.data
def setUp(self):
    """Build a testing config, a mock-backed intercom with short wait time, and a Mongo manager."""
    config = get_config_for_testing(TMP_DIR)
    self.test_queue = Queue()
    mock_services = dict(
        analysis_service=AnalysisServiceMock(),
        compare_service=ServiceMock(self.test_queue),
        unpacking_service=ServiceMock(self.test_queue),
    )
    self.interface = InterComBackEndBinding(config=config, testing=True, **mock_services)
    self.interface.WAIT_TIME = 2  # keep listener polling short so tests run fast
    self.db = MongoMgr(config=config)
class FactBackend(FactBase):
    """FACT backend process: wires analysis, tagging, unpacking and compare together via the intercom."""

    PROGRAM_NAME = 'FACT Backend'
    PROGRAM_DESCRIPTION = 'Firmware Analysis and Compare Tool (FACT) Backend'
    COMPONENT = 'backend'

    def __init__(self):
        super().__init__()
        try:
            self.analysis_service = AnalysisScheduler(config=self.config)
        except PluginInitException as error:
            logging.critical(f'Error during initialization of plugin {error.plugin.NAME}. Shutting down FACT backend')
            complete_shutdown()
        self.tagging_service = TaggingDaemon(analysis_scheduler=self.analysis_service)
        self.unpacking_service = UnpackingScheduler(
            config=self.config,
            post_unpack=self.analysis_service.start_analysis_of_object,
            analysis_workload=self.analysis_service.get_scheduled_workload,
        )
        self.compare_service = CompareScheduler(config=self.config)
        self.intercom = InterComBackEndBinding(
            config=self.config,
            analysis_service=self.analysis_service,
            compare_service=self.compare_service,
            unpacking_service=self.unpacking_service,
        )

    def main(self):
        """Main loop: publish workload stats every 5 s until an exception occurs or run is cleared."""
        while self.run:
            unpacking_workload = self.unpacking_service.get_scheduled_workload()
            analysis_workload = self.analysis_service.get_scheduled_workload()
            self.work_load_stat.update(unpacking_workload=unpacking_workload, analysis_workload=analysis_workload)
            if self._exception_occurred():
                break
            sleep(5)
            if self.args.testing:
                break
        self.shutdown()

    def shutdown(self):
        """Stop all backend services in reverse dependency order."""
        super().shutdown()
        for service in (
            self.intercom,
            self.compare_service,
            self.unpacking_service,
            self.tagging_service,
            self.analysis_service,
        ):
            service.shutdown()
        if not self.args.testing:
            complete_shutdown()

    def _exception_occurred(self):
        # Query every service eagerly (check_exceptions may have side effects),
        # then report whether any of them saw an exception.
        results = [
            service.check_exceptions()
            for service in (self.unpacking_service, self.compare_service, self.analysis_service)
        ]
        return any(results)
def test_scheduler(test_config):
    """Spin up analyzer, unpacker and intercom; yield the unpacker, then tear everything down."""
    analyzer = AnalysisScheduler(test_config)
    unpacker = UnpackingScheduler(config=test_config, post_unpack=analyzer.add_task)
    intercom = InterComBackEndBinding(
        config=test_config,
        analysis_service=analyzer,
        unpacking_service=unpacker,
        compare_service=MockScheduler(),
    )
    yield unpacker
    # shut down in reverse start order
    for component in (intercom, unpacker, analyzer):
        component.shutdown()
def get_intercom_for_testing():
    """Yield a mock-backed intercom (plus running Mongo manager) inside a temp dir; clean up afterwards."""
    with TemporaryDirectory(prefix='fact_test_') as tmp_dir:
        config = get_config_for_testing(tmp_dir)
        test_queue = Queue()
        interface = InterComBackEndBinding(
            config=config,
            testing=True,
            analysis_service=AnalysisServiceMock(),
            compare_service=ServiceMock(test_queue),
            unpacking_service=ServiceMock(test_queue),
        )
        interface.WAIT_TIME = 2  # shorten polling so tests finish quickly
        db = MongoMgr(config=config)

        yield interface

        interface.shutdown()
        test_queue.close()
        db.shutdown()
        gc.collect()
def _start_backend(self):
    """Instantiate the backend services and connect them through the intercom."""
    self.analysis_service = AnalysisScheduler(config=self.config)
    self.unpacking_service = UnpackingScheduler(config=self.config, post_unpack=self.analysis_service.add_task)
    self.compare_service = CompareScheduler(config=self.config)
    self.intercom = InterComBackEndBinding(
        config=self.config,
        analysis_service=self.analysis_service,
        compare_service=self.compare_service,
        unpacking_service=self.unpacking_service,
    )
def __init__(self):
    """Set up analysis, unpacking, compare and intercom services; abort on plugin init failure."""
    super().__init__()
    try:
        self.analysis_service = AnalysisScheduler(config=self.config)
    except PluginInitException as error:
        # a broken plugin makes the whole backend unusable -> shut down completely
        logging.critical(f'Error during initialization of plugin {error.plugin.NAME}. Shutting down FACT backend')
        complete_shutdown()
    self.unpacking_service = UnpackingScheduler(
        config=self.config,
        post_unpack=self.analysis_service.start_analysis_of_object,
        analysis_workload=self.analysis_service.get_scheduled_workload,
    )
    self.compare_service = CompareScheduler(config=self.config)
    self.intercom = InterComBackEndBinding(
        config=self.config,
        analysis_service=self.analysis_service,
        compare_service=self.compare_service,
        unpacking_service=self.unpacking_service,
    )
class TestInterComBackEndScheduler(unittest.TestCase):
    """Tests for starting intercom back-end listeners against mocked services."""

    def setUp(self):
        config = get_config_for_testing(TMP_DIR)
        self.test_queue = Queue()
        mock_services = dict(
            analysis_service=AnalysisServiceMock(),
            compare_service=ServiceMock(self.test_queue),
            unpacking_service=ServiceMock(self.test_queue),
        )
        self.interface = InterComBackEndBinding(config=config, testing=True, **mock_services)
        self.interface.WAIT_TIME = 2  # keep listener polling short for tests
        self.db = MongoMgr(config=config)

    def tearDown(self):
        self.interface.shutdown()
        self.test_queue.close()
        self.db.shutdown()
        TMP_DIR.cleanup()
        gc.collect()

    def test_backend_worker(self):
        """A started listener should forward incoming tasks to the service callback."""
        service = ServiceMock(self.test_queue)
        self.interface._start_listener(CommunicationBackendMock, service.add_task)  # pylint: disable=protected-access
        result = self.test_queue.get(timeout=5)
        self.assertEqual(result, 'test_task', 'task not received correctly')

    def test_all_listeners_started(self):
        """After startup, one process per configured listener should be running."""
        self.interface.startup()
        sleep(2)
        self.assertEqual(len(self.interface.process_list), NUMBER_OF_LISTENERS, 'Not all listeners started')
def _start_backend(self, post_analysis=None, compare_callback=None):
    """Start backend services, passing optional hooks for analysis completion and compares."""
    self.analysis_service = AnalysisScheduler(config=self.config, post_analysis=post_analysis)
    self.unpacking_service = UnpackingScheduler(
        config=self.config,
        post_unpack=self.analysis_service.start_analysis_of_object,
    )
    self.compare_service = CompareScheduler(config=self.config, callback=compare_callback)
    self.intercom = InterComBackEndBinding(
        config=self.config,
        analysis_service=self.analysis_service,
        compare_service=self.compare_service,
        unpacking_service=self.unpacking_service,
    )
def _start_backend(self, post_analysis=None, compare_callback=None):
    """Start backend services with optional hooks; intercom uses a shortened WAIT_TIME."""
    self.analysis_service = AnalysisScheduler(config=self.config, post_analysis=post_analysis)
    self.unpacking_service = UnpackingScheduler(config=self.config, post_unpack=self.analysis_service.add_task)
    self.compare_service = CompareScheduler(config=self.config, callback=compare_callback)
    # patch the class attribute while the binding is constructed so its listeners poll faster
    with patch.object(InterComBackEndBinding, 'WAIT_TIME', 0.5):
        self.intercom = InterComBackEndBinding(
            config=self.config,
            analysis_service=self.analysis_service,
            compare_service=self.compare_service,
            unpacking_service=self.unpacking_service,
        )
def _start_backend(self, post_analysis=None, compare_callback=None):  # pylint: disable=attribute-defined-outside-init
    """Start backend services plus a file-system organizer, with optional hooks."""
    self.analysis_service = AnalysisScheduler(config=self.config, post_analysis=post_analysis)
    self.unpacking_service = UnpackingScheduler(
        config=self.config,
        post_unpack=self.analysis_service.start_analysis_of_object,
    )
    self.compare_service = CompareScheduler(config=self.config, callback=compare_callback)
    self.intercom = InterComBackEndBinding(
        config=self.config,
        analysis_service=self.analysis_service,
        compare_service=self.compare_service,
        unpacking_service=self.unpacking_service,
    )
    self.fs_organizer = FSOrganizer(config=self.config)
# Flag controlling the main loop; presumably flipped to False by the `shutdown`
# signal handler registered below -- handler definition not visible in this chunk.
run = False

if __name__ == '__main__':
    if was_started_by_start_fact():
        # Started by the FACT launcher: SIGUSR1 triggers shutdown, SIGINT is ignored
        # (the launcher handles Ctrl-C itself).
        signal.signal(signal.SIGUSR1, shutdown)
        signal.signal(signal.SIGINT, lambda *_: None)
        os.setpgid(os.getpid(), os.getpid())  # reset pgid to self so that "complete_shutdown" doesn't run amok
    else:
        # Started stand-alone: Ctrl-C shuts the backend down.
        signal.signal(signal.SIGINT, shutdown)
    args, config = program_setup(PROGRAM_NAME, PROGRAM_DESCRIPTION)
    # Wire up the backend services: analysis -> tagging, unpacking feeds analysis,
    # and the intercom connects everything to the front end.
    analysis_service = AnalysisScheduler(config=config)
    tagging_service = TaggingDaemon(analysis_scheduler=analysis_service)
    unpacking_service = UnpackingScheduler(config=config, post_unpack=analysis_service.start_analysis_of_object, analysis_workload=analysis_service.get_scheduled_workload)
    compare_service = CompareScheduler(config=config)
    intercom = InterComBackEndBinding(config=config, analysis_service=analysis_service, compare_service=compare_service, unpacking_service=unpacking_service)
    work_load_stat = WorkLoadStatistic(config=config)
    run = True
    # Main loop: publish workload statistics every 5 s; exit when any service
    # reports an exception, on the testing flag, or when `run` is cleared.
    while run:
        work_load_stat.update(unpacking_workload=unpacking_service.get_scheduled_workload(), analysis_workload=analysis_service.get_scheduled_workload())
        if any((unpacking_service.check_exceptions(), compare_service.check_exceptions(), analysis_service.check_exceptions())):
            break
        sleep(5)
        if args.testing:
            break
    logging.info('shutdown components')
    work_load_stat.shutdown()
    intercom.shutdown()
    compare_service.shutdown()
    # NOTE(review): shutdowns for unpacking_service, tagging_service and
    # analysis_service are not visible here -- the script likely continues past
    # this chunk; confirm before assuming a teardown bug.