Example #1
def test_illegal_input(self):
    """
    Testing illegal input in SMBO.
    """
    cs = ConfigurationSpace()
    cs.add_hyperparameter(UniformFloatHyperparameter('test', 1, 10, 5))
    scen = Scenario({'run_obj': 'quality', 'cs': cs})
    stats = Stats(scen)
    # Recorded runs but no incumbent.
    stats.ta_runs = 10
    smac = SMAC(scen, stats=stats, rng=np.random.RandomState(42))
    self.assertRaises(ValueError, smac.optimize)
    # Incumbent but no recorded runs.
    incumbent = cs.get_default_configuration()
    smac = SMAC(scen, restore_incumbent=incumbent,
                rng=np.random.RandomState(42))
    self.assertRaises(ValueError, smac.optimize)
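
This method comes from a unittest.TestCase subclass (hence self.assertRaises), so it is not runnable on its own. A minimal sketch of the imports its body relies on, assuming an older SMAC 0.x release where the facade is exposed as smac.facade.smac_facade.SMAC:

import numpy as np
from ConfigSpace.configuration_space import ConfigurationSpace
from ConfigSpace.hyperparameters import UniformFloatHyperparameter
from smac.facade.smac_facade import SMAC  # smac.facade.smac_ac_facade.SMAC4AC in newer releases
from smac.scenario.scenario import Scenario
from smac.stats.stats import Stats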
Example #2
def run_configuration(backend, config_id, task_id, configuration, run_args,
                      memory_limit, per_run_time_limit):
    evaluation, iterative_fit, early_stopping, N_FOLDS, searchspace = run_args

    # TODO make this an argument from the command line!
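    # Mock a SMAC Scenario with generous limits so the stand-alone
    # target-algorithm executor built below can reuse SMAC's Stats
    # bookkeeping without running a full SMBO loop.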
    scenario_mock = unittest.mock.Mock()
    scenario_mock.wallclock_limit = per_run_time_limit * 100
    scenario_mock.algo_runs_timelimit = per_run_time_limit * 100
    scenario_mock.ta_run_limit = np.inf
    stats = Stats(scenario_mock)
    stats.ta_runs = 2

    # Resampling strategies
    kwargs = {}
    if evaluation == "holdout" and iterative_fit:
        resampling_strategy = 'holdout-iterative-fit'
    elif evaluation == "holdout" and not iterative_fit:
        resampling_strategy = 'holdout'
    elif evaluation == "CV" and not iterative_fit:
        resampling_strategy = 'cv'
        kwargs = {'folds': N_FOLDS}
    elif evaluation == "CV" and iterative_fit:
        resampling_strategy = 'cv-iterative-fit'
        kwargs = {'folds': N_FOLDS}
    else:
        raise ValueError("Unknown resampling strategy", evaluation)

    iterative_wo_early_stopping = [
        'extra_trees', 'PassiveAggressiveWOEarlyStopping', 'random_forest',
        'SGDWOEarlyStopping', 'GradientBoostingClassifierWOEarlyStopping'
    ]
    iterative_w_early_stopping = [
        'extra_trees', 'passive_aggressive', 'random_forest', 'sgd',
        'gradient_boosting'
    ]

    # add_classifier_wo_early_stopping is a helper defined elsewhere in the
    # original script; it presumably registers the *WOEarlyStopping component
    # variants listed above with auto-sklearn.
    if not early_stopping:
        add_classifier_wo_early_stopping()

    if searchspace == "iterative":
        include_estimator = iterative_w_early_stopping if early_stopping else iterative_wo_early_stopping
        include_preprocessor = [
            "no_preprocessing",
        ]
    elif searchspace == "full":
        assert early_stopping is True
        include_estimator = None
        include_preprocessor = None
    # elif searchspace == 'only-iterative-nopreproc':
    #    include_estimator = iterative_w_early_stopping if early_stopping else iterative_wo_early_stopping
    #    include_preprocessor = ["no_preprocessing", ]
    # elif searchspace == 'only-iterative-cheappreproc':
    #    include_estimator = iterative_w_early_stopping if early_stopping else iterative_wo_early_stopping
    #    include_preprocessor = ["no_preprocessing", 'kitchen_sinks', 'polynomial', 'select_percentile_classification', 'select_rates']
    # elif searchspace == 'only-iterative':
    #    include_estimator = iterative_w_early_stopping if early_stopping else iterative_wo_early_stopping
    #    include_preprocessor = None
    # elif searchspace == "gb":
    #    include_estimator = ['GradientBoostingClassifierWOEarlyStopping'] if early_stopping else ['GradientBoostingClassifierWEarlyStopping']
    #    include_preprocessor = None
    else:
        raise ValueError(searchspace)

    stats.start_timing()
    tae = ExecuteTaFuncWithQueue(
        backend=backend,
        autosklearn_seed=3,
        resampling_strategy=resampling_strategy,
        metric=balanced_accuracy,
        logger=logging.getLogger(name="%s_%s" % (task_id, config_id)),
        initial_num_run=2,
        stats=stats,
        runhistory=None,
        run_obj='quality',
        par_factor=1,
        all_scoring_functions=False,
        output_y_hat_optimization=True,
        include={
            "classifier": include_estimator,
            "feature_preprocessor": include_preprocessor
        },
        exclude=None,
        memory_limit=memory_limit,
        disable_file_output=True,
        init_params=None,
        **kwargs)

    # Finally run configuration
    status, cost, runtime, additional_run_info = tae.start(
        config=configuration,
        instance=None,
        cutoff=per_run_time_limit,
        instance_specific=None,
        capped=False,
    )

    return status, cost, runtime, additional_run_info
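
For context, the sketch below lists the imports this function relies on and shows a hypothetical call. The backend and configuration objects are placeholders that the surrounding experiment script would supply (an auto-sklearn Backend and a ConfigSpace Configuration for the chosen search space), the concrete values are illustrative, and the module paths follow auto-sklearn/SMAC 0.x, so they may differ in newer releases.

# Imports the function above relies on (paths from auto-sklearn/SMAC 0.x).
import logging
import unittest.mock

import numpy as np
from autosklearn.evaluation import ExecuteTaFuncWithQueue
from autosklearn.metrics import balanced_accuracy
from smac.stats.stats import Stats

# Hypothetical invocation: backend and configuration are placeholders
# provided by the surrounding experiment script.
run_args = ("holdout", True, True, 5, "iterative")  # evaluation, iterative_fit, early_stopping, N_FOLDS, searchspace
status, cost, runtime, additional_run_info = run_configuration(
    backend=backend,              # auto-sklearn Backend instance
    config_id=0,
    task_id=31,
    configuration=configuration,  # ConfigSpace Configuration for the pipeline
    run_args=run_args,
    memory_limit=3072,            # in MB
    per_run_time_limit=360,       # in seconds
)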