class TestTagPropagation(unittest.TestCase):

    def setUp(self):
        self._tmp_dir = TemporaryDirectory()
        self._config = initialize_config(self._tmp_dir)
        self.analysis_finished_event = Event()
        # UID of the private key file contained in the test firmware
        self.uid_of_key_file = '530bf2f1203b789bfe054d3118ebd29a04013c587efd22235b3b9677cee21c0e_2048'

        self._mongo_server = MongoMgr(config=self._config, auth=False)
        self.backend_interface = BackEndDbInterface(config=self._config)
        self._analysis_scheduler = AnalysisScheduler(config=self._config, post_analysis=self.count_analysis_finished_event)
        self._tagging_scheduler = TaggingDaemon(analysis_scheduler=self._analysis_scheduler)
        self._unpack_scheduler = UnpackingScheduler(config=self._config, post_unpack=self._analysis_scheduler.add_task)

    def count_analysis_finished_event(self, fw_object):
        self.backend_interface.add_object(fw_object)
        if fw_object.uid == self.uid_of_key_file:
            self.analysis_finished_event.set()

    def _wait_for_empty_tag_queue(self):
        while not self._analysis_scheduler.tag_queue.empty():
            sleep(0.1)

    def tearDown(self):
        self._unpack_scheduler.shutdown()
        self._tagging_scheduler.shutdown()
        self._analysis_scheduler.shutdown()
        clean_test_database(self._config, get_database_names(self._config))
        self._mongo_server.shutdown()
        self._tmp_dir.cleanup()
        gc.collect()

    def test_run_analysis_with_tag(self):
        test_fw = Firmware(file_path='{}/container/with_key.7z'.format(get_test_data_dir()))
        test_fw.release_date = '2017-01-01'
        test_fw.scheduled_analysis = ['crypto_material']

        self._unpack_scheduler.add_task(test_fw)
        assert self.analysis_finished_event.wait(timeout=20)

        # the key file itself must have been tagged by the crypto_material plugin
        processed_fo = self.backend_interface.get_object(self.uid_of_key_file, analysis_filter=['crypto_material'])
        assert processed_fo.processed_analysis['crypto_material']['tags'], 'no tags set in analysis'

        # once the tag queue is drained, the tag must have propagated to the parent firmware
        self._wait_for_empty_tag_queue()
        processed_fw = self.backend_interface.get_object(test_fw.uid, analysis_filter=['crypto_material'])
        assert processed_fw.analysis_tags, 'tags not propagated properly'
        assert processed_fw.analysis_tags['crypto_material']['private_key_inside']
class FactBackend(FactBase):
    PROGRAM_NAME = 'FACT Backend'
    PROGRAM_DESCRIPTION = 'Firmware Analysis and Compare Tool (FACT) Backend'
    COMPONENT = 'backend'

    def __init__(self):
        super().__init__()

        try:
            self.analysis_service = AnalysisScheduler(config=self.config)
        except PluginInitException as error:
            logging.critical(f'Error during initialization of plugin {error.plugin.NAME}. Shutting down FACT backend')
            complete_shutdown()
        # wire the services together: unpacking feeds the analysis scheduler,
        # and the tagging daemon watches its tag queue
        self.tagging_service = TaggingDaemon(analysis_scheduler=self.analysis_service)
        self.unpacking_service = UnpackingScheduler(
            config=self.config,
            post_unpack=self.analysis_service.start_analysis_of_object,
            analysis_workload=self.analysis_service.get_scheduled_workload,
        )
        self.compare_service = CompareScheduler(config=self.config)
        self.intercom = InterComBackEndBinding(
            config=self.config,
            analysis_service=self.analysis_service,
            compare_service=self.compare_service,
            unpacking_service=self.unpacking_service,
        )

    def main(self):
        while self.run:
            self.work_load_stat.update(
                unpacking_workload=self.unpacking_service.get_scheduled_workload(),
                analysis_workload=self.analysis_service.get_scheduled_workload(),
            )
            if self._exception_occurred():
                break
            sleep(5)
            if self.args.testing:
                break

        self.shutdown()

    def shutdown(self):
        super().shutdown()
        self.intercom.shutdown()
        self.compare_service.shutdown()
        self.unpacking_service.shutdown()
        self.tagging_service.shutdown()
        self.analysis_service.shutdown()
        if not self.args.testing:
            complete_shutdown()

    def _exception_occurred(self):
        return any((
            self.unpacking_service.check_exceptions(),
            self.compare_service.check_exceptions(),
            self.analysis_service.check_exceptions(),
        ))
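# A minimal launch sketch, assuming the FactBackend class above: construct
# the backend and enter its main loop. The __main__ guard is an illustrative
# addition, not taken from the original source.
if __name__ == '__main__':
    FactBackend().main()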
def setUp(self):
    self._tmp_dir = TemporaryDirectory()
    self._config = initialize_config(self._tmp_dir)
    self.analysis_finished_event = Event()
    self.uid_of_key_file = '530bf2f1203b789bfe054d3118ebd29a04013c587efd22235b3b9677cee21c0e_2048'

    self._mongo_server = MongoMgr(config=self._config, auth=False)
    self.backend_interface = BackEndDbInterface(config=self._config)
    self._analysis_scheduler = AnalysisScheduler(
        config=self._config,
        pre_analysis=self.backend_interface.add_object,
        post_analysis=self.count_analysis_finished_event,
    )
    self._tagging_scheduler = TaggingDaemon(analysis_scheduler=self._analysis_scheduler)
    self._unpack_scheduler = UnpackingScheduler(config=self._config, post_unpack=self._analysis_scheduler.start_analysis_of_object)
def shutdown(signum, _):
    global run
    logging.info('received {signum}. shutting down {name}...'.format(signum=signum, name=PROGRAM_NAME))
    run = False


if __name__ == '__main__':
    if was_started_by_start_fact():
        signal.signal(signal.SIGUSR1, shutdown)
        signal.signal(signal.SIGINT, lambda *_: None)
        os.setpgid(os.getpid(), os.getpid())  # reset pgid to self so that "complete_shutdown" doesn't run amok
    else:
        signal.signal(signal.SIGINT, shutdown)

    args, config = program_setup(PROGRAM_NAME, PROGRAM_DESCRIPTION)

    analysis_service = AnalysisScheduler(config=config)
    tagging_service = TaggingDaemon(analysis_scheduler=analysis_service)
    unpacking_service = UnpackingScheduler(
        config=config,
        post_unpack=analysis_service.start_analysis_of_object,
        analysis_workload=analysis_service.get_scheduled_workload,
    )
    compare_service = CompareScheduler(config=config)
    intercom = InterComBackEndBinding(
        config=config,
        analysis_service=analysis_service,
        compare_service=compare_service,
        unpacking_service=unpacking_service,
    )
    work_load_stat = WorkLoadStatistic(config=config)

    run = True
    while run:
        work_load_stat.update(
            unpacking_workload=unpacking_service.get_scheduled_workload(),
            analysis_workload=analysis_service.get_scheduled_workload(),
        )
        if any((unpacking_service.check_exceptions(), compare_service.check_exceptions(), analysis_service.check_exceptions())):
            break
        sleep(5)
        if args.testing:
            break

    logging.info('shutdown components')
@pytest.fixture
def detached_scheduler(monkeypatch, analysis_service):
    # replace the worker process class so the daemon does not actually fork
    monkeypatch.setattr('scheduler.analysis_tag.ExceptionSafeProcess', MockProcess)
    return TaggingDaemon(analysis_scheduler=analysis_service)
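# MockProcess is not defined in this excerpt. Below is a hypothetical
# stand-in, assuming it mimics the multiprocessing.Process interface that
# ExceptionSafeProcess extends while never spawning a child process, which
# is what leaves the daemon above "detached".
class MockProcess:
    def __init__(self, target=None, args=(), kwargs=None, **_):
        self.target = target
        self.args = args
        self.kwargs = kwargs or {}

    def start(self):
        pass  # deliberately a no-op: no worker process is started

    def join(self, timeout=None):
        pass

    def is_alive(self):
        return False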
@pytest.fixture
def scheduler(analysis_service):
    return TaggingDaemon(analysis_scheduler=analysis_service)
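# A sketch of a test consuming the fixture above; it relies only on the
# shutdown() method that the backend code in this section already uses.
def test_tagging_daemon_shuts_down_cleanly(scheduler):
    scheduler.shutdown()  # the tagging process should terminate without raising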