Example #1
class TestSchedulerCompare(unittest.TestCase):
    def setUp(self):
        self.config = ConfigParser()
        self.config.add_section('ExpertSettings')
        self.config.set('ExpertSettings', 'block_delay', '2')
        self.config.set('ExpertSettings', 'ssdeep_ignore', '80')

        self.bs_patch = unittest.mock.patch(
            target='storage.binary_service.BinaryService',
            new=lambda _: MockDbInterface)
        self.bs_patch.start()

        self.compare_scheduler = CompareScheduler(
            config=self.config,
            db_interface=MockDbInterface(config=self.config),
            testing=True)

    def tearDown(self):
        self.compare_scheduler.shutdown()
        self.bs_patch.stop()

        gc.collect()

    def test_start_compare(self):
        result = self.compare_scheduler.add_task(('existing_id', True))
        self.assertIsNone(result, 'result is not None')
        uid, redo = self.compare_scheduler.in_queue.get(timeout=2)
        self.assertEqual(uid, 'existing_id', 'retrieved id not correct')
        self.assertTrue(redo, 'redo argument not correct')

    def test_start(self):
        self.compare_scheduler.start()
        sleep(2)

    def test_compare_single_run(self):
        compares_done = set()
        self.compare_scheduler.in_queue.put(
            (self.compare_scheduler.db_interface.test_object.get_uid(), False))
        self.compare_scheduler._compare_single_run(compares_done)
        self.assertEqual(len(compares_done), 1,
                         'compares_done was not set correctly')
        self.assertIn(
            self.compare_scheduler.db_interface.test_object.get_uid(),
            compares_done, 'correct uid not in compares done')

    def test_decide_whether_to_process(self):
        compares_done = set('a')
        self.assertTrue(
            self.compare_scheduler._decide_whether_to_process(
                'b', False, compares_done),
            'a compare not done yet should always be processed')
        self.assertTrue(
            self.compare_scheduler._decide_whether_to_process(
                'a', True, compares_done),
            'redo is true so result should be true')
        self.assertFalse(
            self.compare_scheduler._decide_whether_to_process(
                'a', False, compares_done),
            'already done and no redo -> should be False')
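
The test above pins down the scheduler's task interface: add_task() takes a (compare_id, redo) tuple and returns None when the task is accepted, and the two ExpertSettings options are the only configuration the scheduler needs here. The following stand-alone sketch shows that interface outside the test harness; the import path and the placeholder UIDs are assumptions, everything else is taken from the examples on this page.

from configparser import ConfigParser

from scheduler.Compare import CompareScheduler  # assumed module path; newer FACT versions renamed this module

# the two ExpertSettings options are the ones the unit test above provides
config = ConfigParser()
config.add_section('ExpertSettings')
config.set('ExpertSettings', 'block_delay', '2')
config.set('ExpertSettings', 'ssdeep_ignore', '80')

scheduler = CompareScheduler(config=config)  # without db_interface/testing arguments the production defaults apply
error = scheduler.add_task(('uid_of_fw_1;uid_of_fw_2', False))  # (compare_id, redo); None means the task was accepted
if error is not None:
    print(f'compare could not be scheduled: {error}')
scheduler.shutdown()
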
Example #2
class FactBackend(FactBase):
    PROGRAM_NAME = 'FACT Backend'
    PROGRAM_DESCRIPTION = 'Firmware Analysis and Compare Tool (FACT) Backend'
    COMPONENT = 'backend'

    def __init__(self):
        super().__init__()

        try:
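            # an analysis plugin that fails to initialize aborts the whole backend start-up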
            self.analysis_service = AnalysisScheduler(config=self.config)
        except PluginInitException as error:
            logging.critical(
                f'Error during initialization of plugin {error.plugin.NAME}. Shutting down FACT backend'
            )
            complete_shutdown()
        self.tagging_service = TaggingDaemon(
            analysis_scheduler=self.analysis_service)
        self.unpacking_service = UnpackingScheduler(
            config=self.config,
            post_unpack=self.analysis_service.start_analysis_of_object,
            analysis_workload=self.analysis_service.get_scheduled_workload)
        self.compare_service = CompareScheduler(config=self.config)
        self.intercom = InterComBackEndBinding(
            config=self.config,
            analysis_service=self.analysis_service,
            compare_service=self.compare_service,
            unpacking_service=self.unpacking_service)

    def main(self):
        while self.run:
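            # report the current unpacking/analysis workload and stop once any scheduler has raised an exception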
            self.work_load_stat.update(
                unpacking_workload=self.unpacking_service.get_scheduled_workload(),
                analysis_workload=self.analysis_service.get_scheduled_workload()
            )
            if self._exception_occurred():
                break
            sleep(5)
            if self.args.testing:
                break

        self.shutdown()

    def shutdown(self):
        super().shutdown()
        self.intercom.shutdown()
        self.compare_service.shutdown()
        self.unpacking_service.shutdown()
        self.tagging_service.shutdown()
        self.analysis_service.shutdown()
        if not self.args.testing:
            complete_shutdown()

    def _exception_occurred(self):
        return any((self.unpacking_service.check_exceptions(),
                    self.compare_service.check_exceptions(),
                    self.analysis_service.check_exceptions()))
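
FactBackend only wires the schedulers together; starting it amounts to instantiating the class and calling main(). A minimal launcher sketch, assuming FactBase's constructor handles argument parsing, config loading and logging setup (this is not the project's actual start script):

import sys

if __name__ == '__main__':
    FactBackend().main()  # polling loop; returns after shutdown was triggered or a scheduler raised an exception
    sys.exit(0)
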
Example #3
        signal.signal(signal.SIGINT, lambda *_: None)
        os.setpgid(os.getpid(), os.getpid())  # reset pgid to self so that "complete_shutdown" doesn't run amok
    else:
        signal.signal(signal.SIGINT, shutdown)
    args, config = program_setup(PROGRAM_NAME, PROGRAM_DESCRIPTION)
    analysis_service = AnalysisScheduler(config=config)
    tagging_service = TaggingDaemon(analysis_scheduler=analysis_service)
    unpacking_service = UnpackingScheduler(config=config, post_unpack=analysis_service.start_analysis_of_object, analysis_workload=analysis_service.get_scheduled_workload)
    compare_service = CompareScheduler(config=config)
    intercom = InterComBackEndBinding(config=config, analysis_service=analysis_service, compare_service=compare_service, unpacking_service=unpacking_service)
    work_load_stat = WorkLoadStatistic(config=config)

    run = True
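    # poll the schedulers every 5 seconds; the first unhandled exception in any of them ends the loop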
    while run:
        work_load_stat.update(unpacking_workload=unpacking_service.get_scheduled_workload(), analysis_workload=analysis_service.get_scheduled_workload())
        if any((unpacking_service.check_exceptions(), compare_service.check_exceptions(), analysis_service.check_exceptions())):
            break
        sleep(5)
        if args.testing:
            break

    logging.info('shutdown components')
    work_load_stat.shutdown()
    intercom.shutdown()
    compare_service.shutdown()
    unpacking_service.shutdown()
    tagging_service.shutdown()
    analysis_service.shutdown()
    if not args.testing:
        complete_shutdown()
Example #4
class TestFileAddition(unittest.TestCase):
    @patch('unpacker.unpack.FS_Organizer', MockFSOrganizer)
    def setUp(self):
        self._tmp_dir = TemporaryDirectory()
        self._config = initialize_config(self._tmp_dir)
        self.elements_finished_analyzing = Value('i', 0)
        self.analysis_finished_event = Event()
        self.compare_finished_event = Event()

        self._mongo_server = MongoMgr(config=self._config, auth=False)
        self.backend_interface = BackEndDbInterface(config=self._config)

        self._analysis_scheduler = AnalysisScheduler(
            config=self._config,
            post_analysis=self.count_analysis_finished_event)
        self._unpack_scheduler = UnpackingScheduler(
            config=self._config, post_unpack=self._analysis_scheduler.add_task)
        self._compare_scheduler = CompareScheduler(
            config=self._config, callback=self.trigger_compare_finished_event)

    def count_analysis_finished_event(self, fw_object):
        self.backend_interface.add_analysis(fw_object)
        self.elements_finished_analyzing.value += 1
        if self.elements_finished_analyzing.value == 4 * 2 * 2:  # 2 containers with 4 objects each (the container itself plus 3 files), analysed by 2 plugins
            self.analysis_finished_event.set()

    def trigger_compare_finished_event(self):
        self.compare_finished_event.set()

    def tearDown(self):
        self._compare_scheduler.shutdown()
        self._unpack_scheduler.shutdown()
        self._analysis_scheduler.shutdown()

        clean_test_database(self._config, get_database_names(self._config))
        self._mongo_server.shutdown()

        self._tmp_dir.cleanup()
        gc.collect()

    def test_unpack_analyse_and_compare(self):
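        # end-to-end path: unpack two firmware images, wait for all analyses, then schedule a compare and check its stored result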
        test_fw_1 = Firmware(
            file_path='{}/container/test.zip'.format(get_test_data_dir()))
        test_fw_1.release_date = '2017-01-01'
        test_fw_2 = Firmware(
            file_path='{}/regression_one'.format(get_test_data_dir()))
        test_fw_2.release_date = '2017-01-01'

        self._unpack_scheduler.add_task(test_fw_1)
        self._unpack_scheduler.add_task(test_fw_2)

        self.analysis_finished_event.wait(timeout=20)

        compare_id = normalize_compare_id(';'.join(
            [fw.uid for fw in [test_fw_1, test_fw_2]]))

        self.assertIsNone(
            self._compare_scheduler.add_task((compare_id, False)),
            'adding the compare task returned an error')

        self.compare_finished_event.wait(timeout=10)

        with ConnectTo(CompareDbInterface, self._config) as sc:
            result = sc.get_compare_result(compare_id)

        self.assertEqual(result['plugins']['Software'],
                         self._expected_result()['Software'])
        self.assertCountEqual(
            result['plugins']['File_Coverage']['files_in_common'],
            self._expected_result()['File_Coverage']['files_in_common'])

    @staticmethod
    def _expected_result():
        return {
            'File_Coverage': {
                'files_in_common': {
                    'all': [],
                    'collapse': False
                }
            },
            'Software': {
                'Compare Skipped': {
                    'all':
                    'Required analysis not present: [\'software_components\', \'software_components\']'
                }
            }
        }
Example #5
class TestFileAddition(unittest.TestCase):

    @patch('unpacker.unpack.FS_Organizer', MockFSOrganizer)
    def setUp(self):
        self._tmp_dir = TemporaryDirectory()
        self._config = initialize_config(self._tmp_dir)
        self.elements_finished_analyzing = Value('i', 0)
        self.analysis_finished_event = Event()
        self.compare_finished_event = Event()

        self._mongo_server = MongoMgr(config=self._config, auth=False)
        self.backend_interface = BackEndDbInterface(config=self._config)

        self._analysis_scheduler = AnalysisScheduler(config=self._config, post_analysis=self.count_analysis_finished_event)
        self._unpack_scheduler = UnpackingScheduler(config=self._config, post_unpack=self._analysis_scheduler.add_task)
        self._compare_scheduler = CompareScheduler(config=self._config, callback=self.trigger_compare_finished_event)

    def count_analysis_finished_event(self, fw_object):
        self.backend_interface.add_object(fw_object)
        self.elements_finished_analyzing.value += 1
        if self.elements_finished_analyzing.value > 7:
            self.analysis_finished_event.set()

    def trigger_compare_finished_event(self):
        self.compare_finished_event.set()

    def tearDown(self):
        self._compare_scheduler.shutdown()
        self._unpack_scheduler.shutdown()
        self._analysis_scheduler.shutdown()

        clean_test_database(self._config, get_database_names(self._config))
        self._mongo_server.shutdown()

        self._tmp_dir.cleanup()
        gc.collect()

    def test_unpack_analyse_and_compare(self):
        test_fw_1 = Firmware(file_path='{}/container/test.zip'.format(get_test_data_dir()))
        test_fw_1.release_date = '2017-01-01'
        test_fw_2 = Firmware(file_path='{}/container/test.7z'.format(get_test_data_dir()))
        test_fw_2.release_date = '2017-01-01'

        self._unpack_scheduler.add_task(test_fw_1)
        self._unpack_scheduler.add_task(test_fw_2)

        self.analysis_finished_event.wait(timeout=10)

        compare_id = unify_string_list(';'.join([fw.uid for fw in [test_fw_1, test_fw_2]]))

        self.assertIsNone(self._compare_scheduler.add_task((compare_id, False)), 'adding the compare task returned an error')

        self.compare_finished_event.wait(timeout=10)

        with ConnectTo(CompareDbInterface, self._config) as sc:
            result = sc.get_compare_result(compare_id)

        self.assertFalse(isinstance(result, str), 'compare result should exist')
        self.assertEqual(result['plugins']['Software'], self._expected_result()['Software'])
        self.assertCountEqual(result['plugins']['File_Coverage']['exclusive_files'], self._expected_result()['File_Coverage']['exclusive_files'])

    @staticmethod
    def _expected_result():
        return {
            'File_Coverage': {
                'exclusive_files': {
                    '418a54d78550e8584291c96e5d6168133621f352bfc1d43cf84e81187fef4962_787': [],
                    'd38970f8c5153d1041810d0908292bc8df21e7fd88aab211a8fb96c54afe6b01_319': [],
                    'collapse': False
                },
                'files_in_common': {
                    'all': [
                        'faa11db49f32a90b51dfc3f0254f9fd7a7b46d0b570abd47e1943b86d554447a_28',
                        '289b5a050a83837f192d7129e4c4e02570b94b4924e50159fad5ed1067cfbfeb_20',
                        'd558c9339cb967341d701e3184f863d3928973fccdc1d96042583730b5c7b76a_62'
                    ],
                    'collapse': False
                },
                'similar_files': {}
            },
            'Software': {
                'Compare Skipped': {
                    'all': 'Required analysis not present: [\'software_components\', \'software_components\']'
                }
            }
        }
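
Both integration tests above follow the standard unittest layout: setUp() provisions the MongoDB test instance (via MongoMgr) and the temporary directories, and tearDown() cleans them up again. A minimal module-level runner is sketched below; in practice such tests are usually collected by a test runner like pytest.

if __name__ == '__main__':
    unittest.main()  # executes TestFileAddition with the setUp/tearDown fixtures shown above
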