Example #1
    def test_zero_or_negative_cutoff(self, pynisher_mock):
        config = unittest.mock.Mock()
        config.config_id = 198
        ta = ExecuteTaFuncWithQueue(
            backend=BackendMock(),
            autosklearn_seed=1,
            resampling_strategy='holdout',
            logger=self.logger,
            stats=self.stats,
            metric=accuracy,
            cost_for_crash=get_cost_of_crash(accuracy),
            abort_on_first_run_crash=False,
        )
        self.scenario.wallclock_limit = 5
        self.stats.submitted_ta_runs += 1
        run_info, run_value = ta.run_wrapper(
            RunInfo(config=config,
                    cutoff=9,
                    instance=None,
                    instance_specific=None,
                    seed=1,
                    capped=False))
        self.assertEqual(run_value.status, StatusType.STOP)
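The fixtures referenced in these tests (self.logger, self.stats, self.scenario, self.backend) are created in the test case's setUp, which is not part of the excerpts. Below is a minimal sketch of such a setUp, assuming SMAC's Scenario/Stats classes (smac.scenario.scenario.Scenario, smac.stats.stats.Stats) and a plain mock for the backend; the names and values are illustrative, not the project's actual fixtures.

import logging
import unittest
import unittest.mock

from ConfigSpace import ConfigurationSpace
from smac.scenario.scenario import Scenario
from smac.stats.stats import Stats


class EvaluationTest(unittest.TestCase):
    def setUp(self):
        self.logger = logging.getLogger(self.__class__.__name__)
        self.logger_port = None                 # replace with the port of a logging server where one is required
        self.backend = unittest.mock.Mock()     # stand-in for BackendMock / the real Backend
        self.scenario = Scenario({
            'cs': ConfigurationSpace(),
            'run_obj': 'quality',
            'wallclock_limit': 100,
        })
        self.stats = Stats(self.scenario)
        self.stats.start_timing()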
Example #2
    def test_eval_with_limits_holdout_fail_memory_error(self, pynisher_mock):
        pynisher_mock.side_effect = MemoryError
        config = unittest.mock.Mock()
        config.config_id = 198
        ta = ExecuteTaFuncWithQueue(backend=self.backend, autosklearn_seed=1,
                                    port=self.logger_port,
                                    resampling_strategy='holdout',
                                    stats=self.stats,
                                    memory_limit=3072,
                                    metric=log_loss,
                                    cost_for_crash=get_cost_of_crash(log_loss),
                                    abort_on_first_run_crash=False,
                                    pynisher_context='fork',
                                    )
        info = ta.run_wrapper(RunInfo(config=config, cutoff=30, instance=None,
                                      instance_specific=None, seed=1, capped=False))
        self.assertEqual(info[1].status, StatusType.MEMOUT)

        # For logloss, worst possible result is MAXINT
        worst_possible_result = MAXINT
        self.assertEqual(info[1].cost, worst_possible_result)
        self.assertIsInstance(info[1].time, float)
        self.assertNotIn('exitcode', info[1].additional_info)
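Because log_loss is unbounded, the worst possible result charged when the evaluation runs out of memory is MAXINT, so the cost is checked against MAXINT here rather than the 1.0 used with accuracy in the other examples; the status is mapped to StatusType.MEMOUT and no 'exitcode' entry appears in additional_info.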
Example #3
    def test_silent_exception_in_target_function(self):
        config = unittest.mock.Mock()
        config.config_id = 198

        delattr(self.backend, 'save_targets_ensemble')
        ta = ExecuteTaFuncWithQueue(backend=self.backend,
                                    port=self.logger_port,
                                    autosklearn_seed=1,
                                    resampling_strategy='holdout',
                                    stats=self.stats,
                                    memory_limit=3072,
                                    metric=accuracy,
                                    cost_for_crash=get_cost_of_crash(accuracy),
                                    abort_on_first_run_crash=False,
                                    iterative=False,
                                    pynisher_context='fork',
                                    )
        ta.pynisher_logger = unittest.mock.Mock()
        self.stats.submitted_ta_runs += 1
        info = ta.run_wrapper(RunInfo(config=config, cutoff=3000, instance=None,
                                      instance_specific=None, seed=1, capped=False))
        self.assertEqual(info[1].status, StatusType.CRASHED, msg=str(info[1].additional_info))
        self.assertEqual(info[1].cost, 1.0)
        self.assertIsInstance(info[1].time, float)
        self.assertIn(
            info[1].additional_info['error'],
            (
                """AttributeError("'BackendMock' object has no attribute """
                """'save_targets_ensemble'",)""",
                """AttributeError("'BackendMock' object has no attribute """
                """'save_targets_ensemble'")""",
                """AttributeError('save_targets_ensemble')"""
            )
        )
        self.assertNotIn('exitcode', info[1].additional_info)
        self.assertNotIn('exit_status', info[1].additional_info)
        self.assertNotIn('traceback', info[1])
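Deleting save_targets_ensemble from the backend makes the target function raise an AttributeError inside the resource-limited subprocess. The run is reported as CRASHED with the crash cost for accuracy (1.0), and the error string lands in additional_info; the tuple passed to assertIn accepts the different AttributeError representations produced by different Python versions. Because the exception happens silently inside the child process, no 'exitcode' or 'exit_status' entry is expected.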
Example #4
    def test_exception_in_target_function(self, eval_holdout_mock):
        config = unittest.mock.Mock()
        config.config_id = 198

        eval_holdout_mock.side_effect = ValueError
        ta = ExecuteTaFuncWithQueue(backend=self.backend, autosklearn_seed=1,
                                    port=self.logger_port,
                                    resampling_strategy='holdout',
                                    stats=self.stats,
                                    memory_limit=3072,
                                    metric=accuracy,
                                    cost_for_crash=get_cost_of_crash(accuracy),
                                    abort_on_first_run_crash=False,
                                    pynisher_context='fork',
                                    )
        self.stats.submitted_ta_runs += 1
        info = ta.run_wrapper(RunInfo(config=config, cutoff=30, instance=None,
                                      instance_specific=None, seed=1, capped=False))
        self.assertEqual(info[1].status, StatusType.CRASHED)
        self.assertEqual(info[1].cost, 1.0)
        self.assertIsInstance(info[1].time, float)
        self.assertEqual(info[1].additional_info['error'], 'ValueError()')
        self.assertIn('traceback', info[1].additional_info)
        self.assertNotIn('exitcode', info[1].additional_info)
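The eval_holdout_mock and pynisher_mock arguments in these tests are injected by unittest.mock.patch decorators that the excerpts omit. A sketch of the likely wiring follows; the dotted patch target is an assumption (auto-sklearn's holdout evaluation entry point) and may differ between versions.

import unittest
import unittest.mock


class EvaluationTest(unittest.TestCase):

    # Assumed patch target: the holdout evaluation function that
    # ExecuteTaFuncWithQueue runs under pynisher; the exact dotted path
    # is a guess and may differ between auto-sklearn versions.
    @unittest.mock.patch('autosklearn.evaluation.train_evaluator.eval_holdout')
    def test_exception_in_target_function(self, eval_holdout_mock):
        eval_holdout_mock.side_effect = ValueError
        ...  # remainder as in Example #4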
Example #5
                resampling_strategy='test',
                memory_limit=memory_lim,
                disable_file_output=True,
                logger=logger,
                stats=stats,
                scoring_functions=scoring_functions,
                include=include,
                metric=automl_arguments['metric'],
                cost_for_crash=get_cost_of_crash(automl_arguments['metric']),
                abort_on_first_run_crash=False,
            )
            run_info, run_value = ta.run_wrapper(
                RunInfo(
                    config=config,
                    instance=None,
                    instance_specific=None,
                    seed=1,
                    cutoff=per_run_time_limit * 3,
                    capped=False,
                ))

            if run_value.status == StatusType.SUCCESS:
                assert len(
                    run_value.additional_info) > 1, run_value.additional_info

            # print(additional_run_info)

            validated_trajectory.append(
                list(entry) + [task_id] + [run_value.additional_info])
        print('Finished validating configuration %d/%d' %
              (i + 1, len(trajectory)))
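This fragment re-evaluates configurations from an optimization trajectory with resampling_strategy='test'; the opening of the ExecuteTaFuncWithQueue(...) call and the enclosing loop over trajectory entries are cut off at the top of the excerpt. Each run's additional_run_info is appended to validated_trajectory for later inspection.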
Example #6
    def test_eval_with_limits_holdout_timeout_with_results_in_queue(
            self, pynisher_mock):
        config = unittest.mock.Mock()
        config.config_id = 198

        def side_effect(**kwargs):
            queue = kwargs['queue']
            queue.put({
                'status': StatusType.SUCCESS,
                'loss': 0.5,
                'additional_run_info': {}
            })

        m1 = unittest.mock.Mock()
        m2 = unittest.mock.Mock()
        m1.return_value = m2
        pynisher_mock.return_value = m1
        m2.side_effect = side_effect
        m2.exit_status = pynisher.TimeoutException
        m2.wall_clock_time = 30

        # Test for a successful run
        ta = ExecuteTaFuncWithQueue(
            backend=BackendMock(),
            autosklearn_seed=1,
            resampling_strategy='holdout',
            logger=self.logger,
            stats=self.stats,
            memory_limit=3072,
            metric=accuracy,
            cost_for_crash=get_cost_of_crash(accuracy),
            abort_on_first_run_crash=False,
        )
        info = ta.run_wrapper(
            RunInfo(config=config,
                    cutoff=30,
                    instance=None,
                    instance_specific=None,
                    seed=1,
                    capped=False))
        self.assertEqual(info[1].status, StatusType.SUCCESS)
        self.assertEqual(info[1].cost, 0.5)
        self.assertIsInstance(info[1].time, float)

        # And a crashed run which is in the queue
        def side_effect(**kwargs):
            queue = kwargs['queue']
            queue.put({
                'status': StatusType.CRASHED,
                'loss': 2.0,
                'additional_run_info': {}
            })

        m2.side_effect = side_effect
        ta = ExecuteTaFuncWithQueue(
            backend=BackendMock(),
            autosklearn_seed=1,
            resampling_strategy='holdout',
            logger=self.logger,
            stats=self.stats,
            memory_limit=3072,
            metric=accuracy,
            cost_for_crash=get_cost_of_crash(accuracy),
            abort_on_first_run_crash=False,
        )
        info = ta.run_wrapper(
            RunInfo(config=config,
                    cutoff=30,
                    instance=None,
                    instance_specific=None,
                    seed=1,
                    capped=False))
        self.assertEqual(info[1].status, StatusType.CRASHED)
        self.assertEqual(info[1].cost, 1.0)
        self.assertIsInstance(info[1].time, float)
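Here the mocked pynisher wrapper reports pynisher.TimeoutException as its exit status, but the target function has already put a result dictionary on the queue, so ExecuteTaFuncWithQueue uses the queued result. The first run is therefore returned as SUCCESS with the queued loss of 0.5, while the second, whose queued status is CRASHED, is charged the crash cost for accuracy (1.0) rather than the queued loss of 2.0.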