Example #1
class TestFileAddition(unittest.TestCase):
    @patch('unpacker.unpack.FS_Organizer', MockFSOrganizer)
    def setUp(self):
        self._config = initialize_config(tmp_dir=None)
        self._tmp_queue = Queue()
        self._unpack_scheduler = UnpackingScheduler(
            config=self._config,
            post_unpack=self._dummy_callback,
            db_interface=DatabaseMock())

    def tearDown(self):
        self._unpack_scheduler.shutdown()
        self._tmp_queue.close()
        gc.collect()

    def test_unpack_only(self):
        test_fw = Firmware(
            file_path='{}/container/test.zip'.format(get_test_data_dir()))

        self._unpack_scheduler.add_task(test_fw)

        processed_container = self._tmp_queue.get(timeout=5)

        self.assertEqual(len(processed_container.files_included), 3,
                         'not all included files found')
        self.assertIn(
            'faa11db49f32a90b51dfc3f0254f9fd7a7b46d0b570abd47e1943b86d554447a_28',
            processed_container.files_included,
            'certain file missing after unpacking')

    def _dummy_callback(self, fw):
        self._tmp_queue.put(fw)
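These snippets are shown without their imports. A minimal, hedged header that would make Example #1 importable might look like the following; the FACT-internal module paths are assumptions and vary between versions of the project:

import gc
import unittest
from multiprocessing import Queue
from unittest.mock import patch

from objects.firmware import Firmware                                    # assumed path
from scheduler.unpacking_scheduler import UnpackingScheduler             # assumed path
from test.common_helper import DatabaseMock, get_test_data_dir           # assumed path
from test.integration.common import MockFSOrganizer, initialize_config   # assumed path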
Example #2
class TestTagPropagation(unittest.TestCase):
    def setUp(self):
        self._tmp_dir = TemporaryDirectory()
        self._config = initialize_config(self._tmp_dir)
        self.analysis_finished_event = Event()
        self.uid_of_key_file = '530bf2f1203b789bfe054d3118ebd29a04013c587efd22235b3b9677cee21c0e_2048'

        self._mongo_server = MongoMgr(config=self._config, auth=False)
        self.backend_interface = BackEndDbInterface(config=self._config)

        self._analysis_scheduler = AnalysisScheduler(
            config=self._config,
            post_analysis=self.count_analysis_finished_event)
        self._tagging_scheduler = TaggingDaemon(
            analysis_scheduler=self._analysis_scheduler)
        self._unpack_scheduler = UnpackingScheduler(
            config=self._config, post_unpack=self._analysis_scheduler.add_task)

    def count_analysis_finished_event(self, fw_object):
        self.backend_interface.add_object(fw_object)
        if fw_object.uid == self.uid_of_key_file:
            self.analysis_finished_event.set()

    def _wait_for_empty_tag_queue(self):
        while not self._analysis_scheduler.tag_queue.empty():
            sleep(0.1)

    def tearDown(self):
        self._unpack_scheduler.shutdown()
        self._tagging_scheduler.shutdown()
        self._analysis_scheduler.shutdown()

        clean_test_database(self._config, get_database_names(self._config))
        self._mongo_server.shutdown()

        self._tmp_dir.cleanup()
        gc.collect()

    def test_run_analysis_with_tag(self):
        test_fw = Firmware(
            file_path='{}/container/with_key.7z'.format(get_test_data_dir()))
        test_fw.release_date = '2017-01-01'
        test_fw.scheduled_analysis = ['crypto_material']

        self._unpack_scheduler.add_task(test_fw)

        assert self.analysis_finished_event.wait(timeout=20)

        processed_fo = self.backend_interface.get_object(
            self.uid_of_key_file, analysis_filter=['crypto_material'])
        assert processed_fo.processed_analysis['crypto_material']['tags'], \
            'no tags set in analysis'

        self._wait_for_empty_tag_queue()

        processed_fw = self.backend_interface.get_object(
            test_fw.uid, analysis_filter=['crypto_material'])
        assert processed_fw.analysis_tags, 'tags not propagated properly'
        assert processed_fw.analysis_tags['crypto_material']['private_key_inside']
Example #3
def test_scheduler(test_config, finished_event, intermediate_event):
    interface = BackEndDbInterface(config=test_config)
    elements_finished = Value('i', 0)

    def count_pre_analysis(file_object):
        interface.add_object(file_object)
        elements_finished.value += 1
        if elements_finished.value == 16:
            finished_event.set()
        elif elements_finished.value == 8:
            intermediate_event.set()

    analyzer = AnalysisScheduler(test_config,
                                 pre_analysis=count_pre_analysis,
                                 db_interface=interface)
    unpacker = UnpackingScheduler(
        config=test_config, post_unpack=analyzer.start_analysis_of_object)
    intercom = InterComBackEndBinding(config=test_config,
                                      analysis_service=analyzer,
                                      unpacking_service=unpacker,
                                      compare_service=MockScheduler())
    yield unpacker
    intercom.shutdown()
    unpacker.shutdown()
    analyzer.shutdown()
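Example #3 is a generator-style pytest fixture: everything after the yield runs as teardown. A sketch of how such a fixture would be declared and consumed, assuming pytest and hypothetical companion fixtures test_config, finished_event, and intermediate_event:

import pytest

@pytest.fixture
def test_scheduler(test_config, finished_event, intermediate_event):
    ...  # body as in Example #3, ending with the three shutdown() calls

def test_unpacks_and_analyses_everything(test_scheduler, finished_event):  # hypothetical test
    test_scheduler.add_task(test_firmware)  # test_firmware: an assumed Firmware object
    assert finished_event.wait(timeout=30)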
Example #4
    def setUp(self):
        self._config = initialize_config(tmp_dir=None)
        self._tmp_queue = Queue()
        self._unpack_scheduler = UnpackingScheduler(
            config=self._config,
            post_unpack=self._dummy_callback,
            db_interface=DatabaseMock())
Example #5
class FactBackend(FactBase):
    PROGRAM_NAME = 'FACT Backend'
    PROGRAM_DESCRIPTION = 'Firmware Analysis and Compare Tool (FACT) Backend'
    COMPONENT = 'backend'

    def __init__(self):
        super().__init__()

        try:
            self.analysis_service = AnalysisScheduler(config=self.config)
        except PluginInitException as error:
            logging.critical(
                f'Error during initialization of plugin {error.plugin.NAME}. Shutting down FACT backend'
            )
            complete_shutdown()
        self.tagging_service = TaggingDaemon(
            analysis_scheduler=self.analysis_service)
        self.unpacking_service = UnpackingScheduler(
            config=self.config,
            post_unpack=self.analysis_service.start_analysis_of_object,
            analysis_workload=self.analysis_service.get_scheduled_workload)
        self.compare_service = CompareScheduler(config=self.config)
        self.intercom = InterComBackEndBinding(
            config=self.config,
            analysis_service=self.analysis_service,
            compare_service=self.compare_service,
            unpacking_service=self.unpacking_service)

    def main(self):
        while self.run:
            self.work_load_stat.update(
                unpacking_workload=self.unpacking_service.get_scheduled_workload(),
                analysis_workload=self.analysis_service.get_scheduled_workload())
            if self._exception_occurred():
                break
            sleep(5)
            if self.args.testing:
                break

        self.shutdown()

    def shutdown(self):
        super().shutdown()
        self.intercom.shutdown()
        self.compare_service.shutdown()
        self.unpacking_service.shutdown()
        self.tagging_service.shutdown()
        self.analysis_service.shutdown()
        if not self.args.testing:
            complete_shutdown()

    def _exception_occurred(self):
        return any((self.unpacking_service.check_exceptions(),
                    self.compare_service.check_exceptions(),
                    self.analysis_service.check_exceptions()))
Example #6
    def setUp(self):
        self._tmp_dir = TemporaryDirectory()
        self._config = initialize_config(self._tmp_dir)

        self._mongo_server = MongoMgr(config=self._config, auth=False)

        self._analysis_scheduler = AnalysisScheduler(config=self._config)
        self._unpack_scheduler = UnpackingScheduler(
            config=self._config, post_unpack=self._analysis_scheduler.add_task)
        self._compare_scheduler = CompareScheduler(config=self._config)
Example #7
    def setUp(self):
        self._tmp_dir = TemporaryDirectory()
        self._config = initialize_config(self._tmp_dir)
        self.analysis_finished_event = Event()
        self.uid_of_key_file = '530bf2f1203b789bfe054d3118ebd29a04013c587efd22235b3b9677cee21c0e_2048'

        self._mongo_server = MongoMgr(config=self._config, auth=False)
        self.backend_interface = BackEndDbInterface(config=self._config)

        self._analysis_scheduler = AnalysisScheduler(config=self._config, pre_analysis=self.backend_interface.add_object, post_analysis=self.count_analysis_finished_event)
        self._unpack_scheduler = UnpackingScheduler(config=self._config, post_unpack=self._analysis_scheduler.start_analysis_of_object)
Example #8
def test_scheduler(test_config):
    analyzer = AnalysisScheduler(test_config)
    unpacker = UnpackingScheduler(config=test_config,
                                  post_unpack=analyzer.add_task)
    intercom = InterComBackEndBinding(config=test_config,
                                      analysis_service=analyzer,
                                      unpacking_service=unpacker,
                                      compare_service=MockScheduler())
    yield unpacker
    intercom.shutdown()
    unpacker.shutdown()
    analyzer.shutdown()
Example #9
    def setUp(self):
        self.mocked_interface = DatabaseMock()
        self.enter_patch = unittest.mock.patch(target='helperFunctions.web_interface.ConnectTo.__enter__', new=lambda _: self.mocked_interface)
        self.enter_patch.start()
        self.exit_patch = unittest.mock.patch(target='helperFunctions.web_interface.ConnectTo.__exit__', new=fake_exit)
        self.exit_patch.start()

        self._config = initialize_config(None)
        self._tmp_queue = Queue()

        self._analysis_scheduler = AnalysisScheduler(config=self._config, post_analysis=self._dummy_callback, db_interface=MockDbInterface(None))
        self._unpack_scheduler = UnpackingScheduler(config=self._config, post_unpack=self._analysis_scheduler.add_task)
Example #10
class TestFileAddition(unittest.TestCase):
    @patch('unpacker.unpack.FS_Organizer', MockFSOrganizer)
    def setUp(self):
        self.mocked_interface = DatabaseMock()
        self.enter_patch = unittest.mock.patch(
            target='helperFunctions.web_interface.ConnectTo.__enter__',
            new=lambda _: self.mocked_interface)
        self.enter_patch.start()
        self.exit_patch = unittest.mock.patch(
            target='helperFunctions.web_interface.ConnectTo.__exit__',
            new=fake_exit)
        self.exit_patch.start()

        self._config = initialize_config(None)
        self._tmp_queue = Queue()

        self._analysis_scheduler = AnalysisScheduler(
            config=self._config,
            pre_analysis=lambda *_: None,
            post_analysis=self._dummy_callback,
            db_interface=MockDbInterface(None))
        self._unpack_scheduler = UnpackingScheduler(
            config=self._config,
            post_unpack=self._analysis_scheduler.start_analysis_of_object,
            db_interface=self.mocked_interface)

    def tearDown(self):
        self._unpack_scheduler.shutdown()
        self._analysis_scheduler.shutdown()
        self._tmp_queue.close()

        self.enter_patch.stop()
        self.exit_patch.stop()
        self.mocked_interface.shutdown()
        gc.collect()

    def test_unpack_and_analyse(self):
        test_fw = Firmware(
            file_path='{}/container/test.zip'.format(get_test_data_dir()))

        self._unpack_scheduler.add_task(test_fw)

        for _ in range(4 * 2):  # 4 objects (the container + 3 included files) times 2 mandatory plugins
            processed_container = self._tmp_queue.get(timeout=10)

        self.assertGreaterEqual(len(processed_container.processed_analysis), 3,
                                'at least one analysis not done')

    def _dummy_callback(self, fw):
        self._tmp_queue.put(fw)
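The retrieval loop in test_unpack_and_analyse keeps only the last item pulled from the queue. The same pattern as a hypothetical helper; note that both queue.Queue.get and multiprocessing.Queue.get raise queue.Empty once the timeout expires:

from queue import Empty

def drain_queue(tmp_queue, expected_items, timeout=10):
    # Hypothetical helper: fetch a fixed number of results, return the last one seen.
    last = None
    for _ in range(expected_items):
        try:
            last = tmp_queue.get(timeout=timeout)
        except Empty:
            raise AssertionError('fewer results than expected') from None
    return last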
Example #11
    def setUp(self):
        self._tmp_dir = TemporaryDirectory()
        self._config = initialize_config(self._tmp_dir)
        self.elements_finished_analyzing = Value('i', 0)
        self.analysis_finished_event = Event()
        self.compare_finished_event = Event()

        self._mongo_server = MongoMgr(config=self._config, auth=False)
        self.backend_interface = BackEndDbInterface(config=self._config)

        self._analysis_scheduler = AnalysisScheduler(config=self._config, post_analysis=self.count_analysis_finished_event)
        self._unpack_scheduler = UnpackingScheduler(config=self._config, post_unpack=self._analysis_scheduler.add_task)
        self._compare_scheduler = CompareScheduler(config=self._config, callback=self.trigger_compare_finished_event)
Example #12
    def _start_backend(self):
        self.analysis_service = AnalysisScheduler(config=self.config)
        self.unpacking_service = UnpackingScheduler(
            config=self.config, post_unpack=self.analysis_service.add_task)
        self.compare_service = CompareScheduler(config=self.config)
        self.intercom = InterComBackEndBinding(
            config=self.config,
            analysis_service=self.analysis_service,
            compare_service=self.compare_service,
            unpacking_service=self.unpacking_service)
Example #13
    def __init__(self):
        super().__init__()

        try:
            self.analysis_service = AnalysisScheduler(config=self.config)
        except PluginInitException as error:
            logging.critical(
                f'Error during initialization of plugin {error.plugin.NAME}. Shutting down FACT backend'
            )
            complete_shutdown()
        self.unpacking_service = UnpackingScheduler(
            config=self.config,
            post_unpack=self.analysis_service.start_analysis_of_object,
            analysis_workload=self.analysis_service.get_scheduled_workload)
        self.compare_service = CompareScheduler(config=self.config)
        self.intercom = InterComBackEndBinding(
            config=self.config,
            analysis_service=self.analysis_service,
            compare_service=self.compare_service,
            unpacking_service=self.unpacking_service)
Example #14
    def _start_backend(self, post_analysis=None, compare_callback=None):
        self.analysis_service = AnalysisScheduler(config=self.config,
                                                  post_analysis=post_analysis)
        self.unpacking_service = UnpackingScheduler(
            config=self.config, post_unpack=self.analysis_service.add_task)
        self.compare_service = CompareScheduler(config=self.config,
                                                callback=compare_callback)
        with patch.object(InterComBackEndBinding, 'WAIT_TIME', 0.5):
            self.intercom = InterComBackEndBinding(
                config=self.config,
                analysis_service=self.analysis_service,
                compare_service=self.compare_service,
                unpacking_service=self.unpacking_service)
Example #15
    def _start_backend(self, post_analysis=None, compare_callback=None):
        self.analysis_service = AnalysisScheduler(config=self.config,
                                                  post_analysis=post_analysis)
        self.unpacking_service = UnpackingScheduler(
            config=self.config,
            post_unpack=self.analysis_service.start_analysis_of_object)
        self.compare_service = CompareScheduler(config=self.config,
                                                callback=compare_callback)
        self.intercom = InterComBackEndBinding(
            config=self.config,
            analysis_service=self.analysis_service,
            compare_service=self.compare_service,
            unpacking_service=self.unpacking_service)
Example #16
    def _start_backend(self, post_analysis=None, compare_callback=None):
        # pylint: disable=attribute-defined-outside-init
        self.analysis_service = AnalysisScheduler(config=self.config,
                                                  post_analysis=post_analysis)
        self.unpacking_service = UnpackingScheduler(
            config=self.config,
            post_unpack=self.analysis_service.start_analysis_of_object)
        self.compare_service = CompareScheduler(config=self.config,
                                                callback=compare_callback)
        self.intercom = InterComBackEndBinding(
            config=self.config,
            analysis_service=self.analysis_service,
            compare_service=self.compare_service,
            unpacking_service=self.unpacking_service)
        self.fs_organizer = FSOrganizer(config=self.config)
Example #17
class TestFileAddition(unittest.TestCase):
    @patch('unpacker.unpack.FS_Organizer', MockFSOrganizer)
    def setUp(self):
        self._tmp_dir = TemporaryDirectory()
        self._config = initialize_config(self._tmp_dir)
        self.elements_finished_analyzing = Value('i', 0)
        self.analysis_finished_event = Event()
        self.compare_finished_event = Event()

        self._mongo_server = MongoMgr(config=self._config, auth=False)
        self.backend_interface = BackEndDbInterface(config=self._config)

        self._analysis_scheduler = AnalysisScheduler(
            config=self._config,
            post_analysis=self.count_analysis_finished_event)
        self._unpack_scheduler = UnpackingScheduler(
            config=self._config, post_unpack=self._analysis_scheduler.add_task)
        self._compare_scheduler = CompareScheduler(
            config=self._config, callback=self.trigger_compare_finished_event)

    def count_analysis_finished_event(self, fw_object):
        self.backend_interface.add_analysis(fw_object)
        self.elements_finished_analyzing.value += 1
        if self.elements_finished_analyzing.value == 4 * 2 * 2:  # 2 containers of 4 objects each (container + 3 files), times 2 plugins
            self.analysis_finished_event.set()

    def trigger_compare_finished_event(self):
        self.compare_finished_event.set()

    def tearDown(self):
        self._compare_scheduler.shutdown()
        self._unpack_scheduler.shutdown()
        self._analysis_scheduler.shutdown()

        clean_test_database(self._config, get_database_names(self._config))
        self._mongo_server.shutdown()

        self._tmp_dir.cleanup()
        gc.collect()

    def test_unpack_analyse_and_compare(self):
        test_fw_1 = Firmware(
            file_path='{}/container/test.zip'.format(get_test_data_dir()))
        test_fw_1.release_date = '2017-01-01'
        test_fw_2 = Firmware(
            file_path='{}/regression_one'.format(get_test_data_dir()))
        test_fw_2.release_date = '2017-01-01'

        self._unpack_scheduler.add_task(test_fw_1)
        self._unpack_scheduler.add_task(test_fw_2)

        self.analysis_finished_event.wait(timeout=20)

        compare_id = normalize_compare_id(';'.join(
            [fw.uid for fw in [test_fw_1, test_fw_2]]))

        self.assertIsNone(
            self._compare_scheduler.add_task((compare_id, False)),
            'adding compare task creates error')

        self.compare_finished_event.wait(timeout=10)

        with ConnectTo(CompareDbInterface, self._config) as sc:
            result = sc.get_compare_result(compare_id)

        self.assertEqual(result['plugins']['Software'],
                         self._expected_result()['Software'])
        self.assertCountEqual(
            result['plugins']['File_Coverage']['files_in_common'],
            self._expected_result()['File_Coverage']['files_in_common'])

    @staticmethod
    def _expected_result():
        return {
            'File_Coverage': {
                'files_in_common': {
                    'all': [],
                    'collapse': False
                }
            },
            'Software': {
                'Compare Skipped': {
                    'all': "Required analysis not present: ['software_components', 'software_components']"
                }
            }
        }
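normalize_compare_id itself is not shown on this page; Example #22 uses the older name unify_string_list for the same step. A hedged reimplementation, assuming its only job is to put the ';'-joined uids into a canonical order so that 'a;b' and 'b;a' map to the same compare result:

def normalize_compare_id(compare_id: str) -> str:
    # Assumption: canonical form = sorted, ';'-separated uids
    return ';'.join(sorted(compare_id.split(';')))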
Example #18
def shutdown(signum, _):  # handler signature assumed; it is registered for SIGUSR1/SIGINT below
    global run
    logging.info('received {signum}. shutting down {name}...'.format(signum=signum, name=PROGRAM_NAME))
    run = False


if __name__ == '__main__':
    if was_started_by_start_fact():
        signal.signal(signal.SIGUSR1, shutdown)
        signal.signal(signal.SIGINT, lambda *_: None)
        os.setpgid(os.getpid(), os.getpid())  # reset pgid to self so that "complete_shutdown" doesn't run amok
    else:
        signal.signal(signal.SIGINT, shutdown)
    args, config = program_setup(PROGRAM_NAME, PROGRAM_DESCRIPTION)
    analysis_service = AnalysisScheduler(config=config)
    tagging_service = TaggingDaemon(analysis_scheduler=analysis_service)
    unpacking_service = UnpackingScheduler(config=config, post_unpack=analysis_service.start_analysis_of_object, analysis_workload=analysis_service.get_scheduled_workload)
    compare_service = CompareScheduler(config=config)
    intercom = InterComBackEndBinding(config=config, analysis_service=analysis_service, compare_service=compare_service, unpacking_service=unpacking_service)
    work_load_stat = WorkLoadStatistic(config=config)

    run = True
    while run:
        work_load_stat.update(unpacking_workload=unpacking_service.get_scheduled_workload(), analysis_workload=analysis_service.get_scheduled_workload())
        if any((unpacking_service.check_exceptions(), compare_service.check_exceptions(), analysis_service.check_exceptions())):
            break
        sleep(5)
        if args.testing:
            break

    logging.info('shutdown components')
    work_load_stat.shutdown()
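The snippet breaks off after the workload statistic is shut down. Judging from FactBackend.shutdown() in Example #5, the remaining teardown presumably continues along these lines (a sketch, not verbatim source):

    intercom.shutdown()
    compare_service.shutdown()
    unpacking_service.shutdown()
    tagging_service.shutdown()
    analysis_service.shutdown()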
Example #19
        signal.signal(signal.SIGINT, lambda *_: None)
        os.setpgid(os.getpid(), os.getpid())  # reset pgid to self so that "complete_shutdown" doesn't run amok
    else:
        signal.signal(signal.SIGINT, shutdown)
    args, config = program_setup(PROGRAM_NAME, PROGRAM_DESCRIPTION)
    try:
        analysis_service = AnalysisScheduler(config=config)
    except PluginInitException as error:
        logging.critical(
            'Error during initialization of plugin {}. Shutting down FACT backend'
            .format(error.plugin.NAME))
        complete_shutdown()
    tagging_service = TaggingDaemon(analysis_scheduler=analysis_service)
    unpacking_service = UnpackingScheduler(
        config=config,
        post_unpack=analysis_service.start_analysis_of_object,
        analysis_workload=analysis_service.get_scheduled_workload)
    compare_service = CompareScheduler(config=config)
    intercom = InterComBackEndBinding(config=config,
                                      analysis_service=analysis_service,
                                      compare_service=compare_service,
                                      unpacking_service=unpacking_service)
    work_load_stat = WorkLoadStatistic(config=config)

    run = True
    while run:
        work_load_stat.update(
            unpacking_workload=unpacking_service.get_scheduled_workload(),
            analysis_workload=analysis_service.get_scheduled_workload())
        if any((unpacking_service.check_exceptions(),
                compare_service.check_exceptions(),
                analysis_service.check_exceptions())):
            break
Example #20
    def _start_scheduler(self):
        self.sched = UnpackingScheduler(
            config=self.config,
            post_unpack=self._mock_callback,
            analysis_workload=self._mock_get_analysis_workload)
Example #21
    def setUp(self):
        self._config = initialize_config(tmp_dir=None)
        self._tmp_queue = Queue()
        self._unpack_scheduler = UnpackingScheduler(
            config=self._config, post_unpack=self._dummy_callback)
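Example #21 references self._dummy_callback without defining it; in the sibling snippets (Examples #1 and #10) it simply forwards each finished object to the test queue:

    def _dummy_callback(self, fw):
        self._tmp_queue.put(fw)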
Example #22
class TestFileAddition(unittest.TestCase):

    @patch('unpacker.unpack.FS_Organizer', MockFSOrganizer)
    def setUp(self):
        self._tmp_dir = TemporaryDirectory()
        self._config = initialize_config(self._tmp_dir)
        self.elements_finished_analyzing = Value('i', 0)
        self.analysis_finished_event = Event()
        self.compare_finished_event = Event()

        self._mongo_server = MongoMgr(config=self._config, auth=False)
        self.backend_interface = BackEndDbInterface(config=self._config)

        self._analysis_scheduler = AnalysisScheduler(config=self._config, post_analysis=self.count_analysis_finished_event)
        self._unpack_scheduler = UnpackingScheduler(config=self._config, post_unpack=self._analysis_scheduler.add_task)
        self._compare_scheduler = CompareScheduler(config=self._config, callback=self.trigger_compare_finished_event)

    def count_analysis_finished_event(self, fw_object):
        self.backend_interface.add_object(fw_object)
        self.elements_finished_analyzing.value += 1
        if self.elements_finished_analyzing.value > 7:
            self.analysis_finished_event.set()

    def trigger_compare_finished_event(self):
        self.compare_finished_event.set()

    def tearDown(self):
        self._compare_scheduler.shutdown()
        self._unpack_scheduler.shutdown()
        self._analysis_scheduler.shutdown()

        clean_test_database(self._config, get_database_names(self._config))
        self._mongo_server.shutdown()

        self._tmp_dir.cleanup()
        gc.collect()

    def test_unpack_analyse_and_compare(self):
        test_fw_1 = Firmware(file_path='{}/container/test.zip'.format(get_test_data_dir()))
        test_fw_1.release_date = '2017-01-01'
        test_fw_2 = Firmware(file_path='{}/container/test.7z'.format(get_test_data_dir()))
        test_fw_2.release_date = '2017-01-01'

        self._unpack_scheduler.add_task(test_fw_1)
        self._unpack_scheduler.add_task(test_fw_2)

        self.analysis_finished_event.wait(timeout=10)

        compare_id = unify_string_list(';'.join([fw.uid for fw in [test_fw_1, test_fw_2]]))

        self.assertIsNone(self._compare_scheduler.add_task((compare_id, False)), 'adding compare task creates error')

        self.compare_finished_event.wait(timeout=10)

        with ConnectTo(CompareDbInterface, self._config) as sc:
            result = sc.get_compare_result(compare_id)

        self.assertFalse(isinstance(result, str), 'compare result should exist')
        self.assertEqual(result['plugins']['Software'], self._expected_result()['Software'])
        self.assertCountEqual(result['plugins']['File_Coverage']['exclusive_files'], self._expected_result()['File_Coverage']['exclusive_files'])

    @staticmethod
    def _expected_result():
        return {
            'File_Coverage': {
                'exclusive_files': {
                    '418a54d78550e8584291c96e5d6168133621f352bfc1d43cf84e81187fef4962_787': [],
                    'd38970f8c5153d1041810d0908292bc8df21e7fd88aab211a8fb96c54afe6b01_319': [],
                    'collapse': False
                },
                'files_in_common': {
                    'all': [
                        'faa11db49f32a90b51dfc3f0254f9fd7a7b46d0b570abd47e1943b86d554447a_28',
                        '289b5a050a83837f192d7129e4c4e02570b94b4924e50159fad5ed1067cfbfeb_20',
                        'd558c9339cb967341d701e3184f863d3928973fccdc1d96042583730b5c7b76a_62'
                    ],
                    'collapse': False
                },
                'similar_files': {}
            },
            'Software': {
                'Compare Skipped': {
                    'all': 'Required analysis not present: [\'software_components\', \'software_components\']'
                }
            }
        }
Example #23
    def _start_scheduler(self):
        self.scheduler = UnpackingScheduler(
            config=self.config,
            post_unpack=self._mock_callback,
            analysis_workload=self._mock_get_analysis_workload,
            db_interface=DatabaseMock())
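Examples #20 and #23 rely on _mock_callback and _mock_get_analysis_workload, which are not shown. Hypothetical minimal implementations, consistent with UnpackingScheduler taking a post-unpack hook and a workload probe used for throttling:

    def _mock_callback(self, fw_object):
        # Hypothetical: record the unpacked object so the test can assert on it.
        self.unpacked_objects.append(fw_object)

    def _mock_get_analysis_workload(self):
        # Hypothetical: report an empty analysis queue so unpacking never throttles.
        return 0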