Example #1
 def setUp(self):
     self.cs = ConfigurationSpace()
     self.scenario = Scenario({'cs': self.cs, 'output_dir': ''})
     self.stats = Stats(scenario=self.scenario)
Example #2
 def test_write_except(self, patch_isdir, patch_mkdirs):
     patch_isdir.return_value = False
     patch_mkdirs.side_effect = OSError()
     with self.assertRaises(OSError) as cm:
         Scenario(self.test_scenario_dict)
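The two mock arguments imply this test is wrapped in unittest.mock.patch decorators that the snippet does not show. A minimal sketch of the presumable setup, assuming the patch targets are os.path.isdir and os.makedirs (decorators apply bottom-up, so the lowest one maps to the first mock argument):

import unittest
from unittest import mock

from smac.scenario.scenario import Scenario

class ScenarioWriteTest(unittest.TestCase):  # hypothetical test-case class
    @mock.patch('os.makedirs')      # top decorator -> second mock argument
    @mock.patch('os.path.isdir')    # bottom decorator -> first mock argument
    def test_write_except(self, patch_isdir, patch_mkdirs):
        patch_isdir.return_value = False
        patch_mkdirs.side_effect = OSError()
        with self.assertRaises(OSError):
            Scenario(self.test_scenario_dict)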
Example #3
 def test_str_cast_instances(self):
     self.scen = Scenario({'cs': None,
                           'instances': [[1], [2]]})
     self.assertIsInstance(self.scen.train_insts[0], str)
     self.assertIsInstance(self.scen.train_insts[1], str)
Example #4
from sklearn import datasets

from smac.scenario.scenario import Scenario

from optimize import run_tests
import configs

import warnings
warnings.filterwarnings("ignore")

scenarios = [
    (Scenario({
        "run_obj": "quality",
        "runcount_limit": 25,
        "deterministic": "true",
        "memory_limit": 3072,
        "output_dir": "./logs/"
    }), "25"),
    (Scenario({
        "run_obj": "quality",
        "runcount_limit": 50,
        "deterministic": "true",
        "memory_limit": 3072,
        "output_dir": "./logs/",
    }), "50"),
    (Scenario({
        "run_obj": "quality",
        "runcount_limit": 100,
        "deterministic": "true",
        "memory_limit": 3072,
        "output_dir": "./logs/"
Example #5
    def _main_cli(self):
        """Main function of SMAC for CLI interface
        
        Returns
        -------
        instance
            optimizer
        """
        self.logger.info("SMAC call: %s", " ".join(sys.argv))

        cmd_reader = CMDReader()
        args, _ = cmd_reader.read_cmd()

        root_logger = logging.getLogger()
        root_logger.setLevel(args.verbose_level)
        logger_handler = logging.StreamHandler(stream=sys.stdout)
        if root_logger.level >= logging.INFO:
            formatter = logging.Formatter("%(levelname)s:\t%(message)s")
        else:
            formatter = logging.Formatter(
                "%(asctime)s:%(levelname)s:%(name)s:%(message)s",
                "%Y-%m-%d %H:%M:%S")
        logger_handler.setFormatter(formatter)
        root_logger.addHandler(logger_handler)
        # remove default handler
        root_logger.removeHandler(root_logger.handlers[0])

        # Create defaults
        rh = None
        initial_configs = None
        stats = None
        incumbent = None

        # Create scenario-object
        scen = Scenario(args.scenario_file, [])
        self.cs = scen.cs

        if args.mode == "SMAC":
            optimizer = SMAC(scenario=scen,
                             rng=np.random.RandomState(args.seed),
                             runhistory=rh,
                             initial_configurations=initial_configs,
                             stats=stats,
                             restore_incumbent=incumbent,
                             run_id=args.seed)
        elif args.mode == "ROAR":
            optimizer = ROAR(scenario=scen,
                             rng=np.random.RandomState(args.seed),
                             runhistory=rh,
                             initial_configurations=initial_configs,
                             run_id=args.seed)
        elif args.mode == "EPILS":
            optimizer = EPILS(scenario=scen,
                              rng=np.random.RandomState(args.seed),
                              runhistory=rh,
                              initial_configurations=initial_configs,
                              run_id=args.seed)
        else:
            optimizer = None

        return optimizer
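_main_cli only constructs the optimizer, so the caller presumably drives the run. A hedged usage sketch (the smac_cli instance name is an assumption):

# Hypothetical caller:
optimizer = smac_cli._main_cli()
if optimizer is not None:
    incumbent = optimizer.optimize()
    print("Final incumbent: %s" % incumbent)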
Example #6
File: svm.py Project: midasc/SMAC3
# For example, gamma can be either "auto" or a fixed float
gamma = CategoricalHyperparameter("gamma", ["auto", "value"],
                                  default="auto")  # only rbf, poly, sigmoid
gamma_value = UniformFloatHyperparameter("gamma_value", 0.0001, 8, default=1)
cs.add_hyperparameters([gamma, gamma_value])
# We only activate gamma_value if gamma is set to "value"
cs.add_condition(InCondition(child=gamma_value, parent=gamma,
                             values=["value"]))
# And again we can restrict the use of gamma in general to the choice of the kernel
cs.add_condition(
    InCondition(child=gamma, parent=kernel, values=["rbf", "poly", "sigmoid"]))

# Scenario object
scenario = Scenario({
    "run_obj": "quality",  # we optimize quality (alternatively runtime)
    "runcount-limit": 200,  # maximum function evaluations
    "cs": cs,  # configuration space
    "deterministic": "true"
})

# Example call of the function
# It returns: Status, Cost, Runtime, Additional Infos
def_value = svm_from_cfg(cs.get_default_configuration())
print("Default Value: %.2f" % (def_value))

# Optimize, using a SMAC-object
print("Optimizing! Depending on your machine, this might take a few minutes.")
smac = SMAC(scenario=scenario,
            rng=np.random.RandomState(42),
            tae_runner=svm_from_cfg)

incumbent = smac.optimize()
Example #7
 def setUp(self):
     self.cs = ConfigurationSpace()
     self.scenario = Scenario({'cs': self.cs, 'run_obj': 'quality',
                               'output_dir': ''})
     self.output_dirs = []
Example #8
def optimize():
    # We load the iris-dataset (a widely used benchmark)
    iris = datasets.load_iris()

    #logger = logging.getLogger("SVMExample")
    logging.basicConfig(level=logging.INFO)  # logging.DEBUG for debug output

    # Build Configuration Space which defines all parameters and their ranges
    cs = ConfigurationSpace()

    # We define a few possible types of SVM-kernels and add them as "kernel" to our cs
    kernel = CategoricalHyperparameter("kernel",
                                       ["linear", "rbf", "poly", "sigmoid"],
                                       default="poly")
    cs.add_hyperparameter(kernel)

    # There are some hyperparameters shared by all kernels
    C = UniformFloatHyperparameter("C", 0.001, 1000.0, default=1.0)
    shrinking = CategoricalHyperparameter("shrinking", ["true", "false"],
                                          default="true")
    cs.add_hyperparameters([C, shrinking])

    # Others are kernel-specific, so we can add conditions to limit the searchspace
    degree = UniformIntegerHyperparameter(
        "degree", 1, 5, default=3)  # Only used by kernel poly
    coef0 = UniformFloatHyperparameter("coef0", 0.0, 10.0,
                                       default=0.0)  # poly, sigmoid
    cs.add_hyperparameters([degree, coef0])
    use_degree = InCondition(child=degree, parent=kernel, values=["poly"])
    use_coef0 = InCondition(child=coef0,
                            parent=kernel,
                            values=["poly", "sigmoid"])
    cs.add_conditions([use_degree, use_coef0])

    # This also works for parameters that are a mix of categorical and values from a range of numbers
    # For example, gamma can be either "auto" or a fixed float
    gamma = CategoricalHyperparameter(
        "gamma", ["auto", "value"], default="auto")  # only rbf, poly, sigmoid
    gamma_value = UniformFloatHyperparameter("gamma_value",
                                             0.0001,
                                             8,
                                             default=1)
    cs.add_hyperparameters([gamma, gamma_value])
    # We only activate gamma_value if gamma is set to "value"
    cs.add_condition(
        InCondition(child=gamma_value, parent=gamma, values=["value"]))
    # And again we can restrict the use of gamma in general to the choice of the kernel
    cs.add_condition(
        InCondition(child=gamma,
                    parent=kernel,
                    values=["rbf", "poly", "sigmoid"]))

    # Scenario object
    scenario = Scenario("test/test_files/svm_scenario.txt")

    # Example call of the function
    # It returns: Status, Cost, Runtime, Additional Infos
    def_value = svm_from_cfg(cs.get_default_configuration())
    print("Default Value: %.2f" % (def_value))

    # Optimize, using a SMAC-object
    print(
        "Optimizing! Depending on your machine, this might take a few minutes."
    )
    smac = SMAC(scenario=scenario,
                rng=np.random.RandomState(42),
                tae_runner=svm_from_cfg)

    incumbent = smac.optimize()
    inc_value = svm_from_cfg(incumbent)

    print("Optimized Value: %.2f" % (inc_value))
Example #9
    def hpbandster2smac(self, folder2result, cs: ConfigurationSpace, backup_cs, output_dir: str):
        """Reading hpbandster-result-object and creating RunHistory and trajectory...
        treats each budget as an individual 'smac'-run, creates an
        output-directory with subdirectories for each budget.

        Parameters
        ----------
        folder2result: Dict(str : hpbandster.core.result.Result)
            folder mapping to bohb's result-objects
        cs: ConfigurationSpace
            the configuration space
        backup_cs: List[ConfigurationSpace]
            if loading a configuration fails, try configspaces from this list until one succeeds
        output_dir: str
            the output-dir to save the smac-runs to
        """
        # Create runhistories (one per budget)
        budget2rh = {}
        for folder, result in folder2result.items():
            id2config_mapping = result.get_id2config_mapping()
            skipped = {'None' : 0, 'NaN' : 0}
            for run in result.get_all_runs():
                if run.budget not in budget2rh:
                    budget2rh[run.budget] = RunHistory(average_cost)
                rh = budget2rh[run.budget]

                # Load config...
                try:
                    config = self._get_config(run.config_id, id2config_mapping, cs)
                except ValueError as err:
                    self.logger.debug("Loading configuration failed... trying alternatives", exc_info=1)
                    for bcs in backup_cs:
                        try:
                            config = self._get_config(run.config_id, id2config_mapping, bcs)
                            cs = bcs
                            break
                        except ValueError:
                            self.logger.debug("", exc_info=1)
                            pass
                    else:
                        self.logger.debug("None of the alternatives worked...")
                        raise ValueError("Your configspace seems to be corrupt. If you use floats (or mix up ints, bools and strings) as categoricals, "
                                         "please consider using the .json-format, as the .pcs-format cannot recover the type "
                                         "of categoricals. Otherwise please report this to "
                                         "https://github.com/automl/CAVE/issues (and attach the debug.log)")

                if run.loss is None:
                    skipped['None'] += 1
                    continue
                if np.isnan(run.loss):
                    skipped['NaN'] += 1
                    continue

                rh.add(config=config,
                       cost=run.loss,
                       time=run.time_stamps['finished'] - run.time_stamps['started'],
                       status=StatusType.SUCCESS,
                       seed=0,
                       additional_info={'info' : run.info, 'timestamps': run.time_stamps})

            self.logger.debug("Skipped %d None- and %d NaN-loss-values in BOHB-result", skipped['None'], skipped['NaN'])

        # Write to disk
        budget2path = {}  # paths to individual budgets
        self.logger.info("Assuming BOHB treats target algorithms as deterministic (and does not re-evaluate)")
        for b, rh in budget2rh.items():
            output_path = os.path.join(output_dir, 'budget_{}'.format(int(b) if b.is_integer() else b))
            budget2path[b] = output_path

            scenario = Scenario({'run_obj' : 'quality',
                                 'cs' : cs,
                                 'output_dir' : output_dir,
                                 'deterministic' : True,  # At the time of writing, BOHB is always treating ta's as deterministic
                                 })
            scenario.output_dir_for_this_run = output_path
            scenario.write()

            with open(os.path.join(output_path, 'configspace.json'), 'w') as fh:
                fh.write(pcs_json.write(cs))

            rh.save_json(fn=os.path.join(output_path, 'runhistory.json'))
            self.get_trajectory(folder2result, output_path, scenario, rh, budget=b)

        return budget2path
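A hedged usage sketch, assuming hpbandster results were logged to disk and that this method lives on a converter object (converter is a placeholder name):

# Hypothetical usage:
from hpbandster.core.result import logged_results_to_HBS_result

folder2result = {'bohb_run': logged_results_to_HBS_result('bohb_run')}
budget2path = converter.hpbandster2smac(folder2result, cs,
                                        backup_cs=[],
                                        output_dir='converted_smac_runs')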
Example #10
def dqn_smac_wrapper(**params):

    logdir = params["logdir"]

    dqn_output_dir = os.path.join(
        logdir, 'dqn_output{:02d}'.format(params["instance_id"]))
    if not os.path.isdir(dqn_output_dir):
        os.makedirs(dqn_output_dir)
    smac_output_dir = os.path.join(
        logdir, 'smac3_output{:02d}'.format(params["instance_id"]))

    # logdir = os.path.join(args.logdir, str(datetime.datetime.today()))
    # os.makedirs(logdir)
    #args.logdir = logdir

    def dqn_from_cfg(cfg):
        """ Creates the DQN algorithm based on the given configuration.

        :param cfg: Configuration (ConfigSpace.ConfigurationSpace.Configuration)
            Configuration containing the parameters.
            Configurations are indexable!
        :return: A quality score of the algorithm's performance
        """

        # For deactivated parameters the configuration stores None-values.
        # These are not accepted by the DQN algorithm, hence we remove them.
        cfg = {k: cfg[k] for k in cfg if cfg[k] is not None}

        # create run directory
        dir_list = glob.glob(os.path.join(dqn_output_dir, 'run*'))
        rundir = 'run{:02d}'.format(len(dir_list) + 1)

        params["logdir"] = os.path.join(dqn_output_dir, rundir)
        os.makedirs(params["logdir"])
        # print(args.logdir)
        avg_perf, var_perf, max_return = run_dqn_smac(**params, **cfg)
        logger.info('average performance: %s' % avg_perf)
        logger.info('performance variance: %s' % var_perf)
        logger.info('maximum episode return: %s' % max_return)

        score = -avg_perf  # - (avg_perf - 0.2 * var_perf + 0.5 * max_return)  # SMAC is minimizing this.
        logger.info('Quality measure of the current learned agent: %s\n' %
                    score)
        return score

    logging.basicConfig(level=logging.INFO)  # logging.DEBUG for debug output

    logger = logging.getLogger()
    logger.propagate = False  # no duplicate logging outputs
    fh = logging.FileHandler(os.path.join(logdir, 'smac.log'))
    fh.setLevel(logging.INFO)
    fh.setFormatter(
        logging.Formatter('%(asctime)s - %(levelname)s:%(name)s: %(message)s'))
    logger.addHandler(fh)

    # Build configuration space and define all hyperparameters
    cs = ConfigurationSpace()
    epsilon = UniformFloatHyperparameter("epsilon",
                                         0.2,
                                         0.9,
                                         default_value=0.6)  # initial epsilon
    epsilon_decay = UniformFloatHyperparameter(
        "epsilon_decay", 0.2, 1, default_value=0.995)  # decay rate
    lr = UniformFloatHyperparameter("lr", 0.0005, 0.01, default_value=0.005)
    #units_shared_layer1 = UniformIntegerHyperparameter("units_layer1", 8, 100, default_value=24)
    #units_shared_layer2 = UniformIntegerHyperparameter("units_layer2", 8, 100, default_value=24)
    #units_policy_layer = UniformIntegerHyperparameter("units_layer3", 8, 100, default_value=24)
    activ_fcn = CategoricalHyperparameter("activ_fcn",
                                          ['relu6', 'elu', 'mixed'],
                                          default_value='relu6')
    #gamma = UniformFloatHyperparameter("gamma", 0.6, 0.90, default_value=0.80)
    tau = UniformFloatHyperparameter("tau", 0.5, 1., default_value=0.7)
    # update_interval = UniformIntegerHyperparameter("update_interval", 1, 300, default_value=50)
    if params["architecture"] == 'lstm' or (params["architecture"] == 'gru'):
        trace_length = UniformIntegerHyperparameter("trace_length",
                                                    1,
                                                    20,
                                                    default_value=8)
        # buffer_condition = LessThanCondition(child=trace_length, parent=params["buffer_size"])
        # pa["batch_size"] = 5
        cs.add_hyperparameters(
            [epsilon, epsilon_decay, activ_fcn, lr, tau, trace_length])

        # cs.add_hyperparameters([units_shared_layer1, units_shared_layer2, units_policy_layer,
        #                        epsilon, epsilon_decay, activ_fcn, lr, gamma, tau, trace_length])
    else:
        params.pop("batch_size")
        batch_size = UniformIntegerHyperparameter("batch_size",
                                                  1,
                                                  100,
                                                  default_value=30)
        # buffer_condition = LessThanCondition(child=batch_size, parent=params["buffer_size"], value=33)
        # InCondition(child=batch_size, value=33)
        # cs.add_hyperparameters([units_shared_layer1, units_shared_layer2, units_policy_layer,
        #                         epsilon, epsilon_decay, activ_fcn, lr, gamma, tau, batch_size])
        cs.add_hyperparameters(
            [epsilon, epsilon_decay, activ_fcn, lr, tau, batch_size])

    # Create scenario object
    logger.info('##############################################')
    logger.info('Setup SMAC instance')
    logger.info('##############################################')

    logger.info('Output_dir: %s' % smac_output_dir)
    if params["run_parallel"].lower() == "true":
        scenario = Scenario({
            "run_obj": "quality",  # we optimize quality of the learned agent
            "runcount-limit": params["runcount_limit"],  # maximum function evaluations
            "cs": cs,  # configuration space
            "deterministic": "true",
            "output_dir": smac_output_dir,
            "shared_model": True,
            "input_psmac_dirs": os.path.join(logdir, 'smac3_output*')
        })
    else:
        scenario = Scenario({
            "run_obj": "quality",  # we optimize quality of the learned agent
            "runcount-limit": params["runcount_limit"],  # maximum function evaluations
            "cs": cs,  # configuration space
            "deterministic": "true",
            "output_dir": smac_output_dir,
        })
    seed = np.random.RandomState(params["seed"])
    smac = SMAC(scenario=scenario, rng=seed, tae_runner=dqn_from_cfg)

    logger.info('##############################################')
    logger.info('Run Optimization')
    logger.info('##############################################')

    optimized_cfg = smac.optimize()

    logger.info('##############################################')
    logger.info('Evaluate Configuration found by SMAC')
    logger.info('##############################################')

    optimized_performance = dqn_from_cfg(optimized_cfg)
    logger.info("Optimized config")
    for k in optimized_cfg:
        logger.info(str(k) + ": " + str(optimized_cfg[k]))
    logger.info("Optimized performance: %.2f" % optimized_performance)

    with open(os.path.join(logdir, 'opt_hyperparams.txt'), 'a') as f:
        for k in optimized_cfg:
            f.write(str(k) + ': ' + str(optimized_cfg[k]) + '\n')
        f.write("Optimized performance: %.2f\n\n" % optimized_performance)

    with open(os.path.join(logdir, 'opt_hyperparams.csv'), 'a') as f:
        labels = []
        for k in optimized_cfg:
            labels.append(str(k))
        labels.insert(0, 'performance')
        labels.insert(0, 'instance_id')
        writer = csv.DictWriter(f, fieldnames=labels)
        if params["instance_id"] == 1:
            writer.writeheader()
        optimized_cfg._values["performance"] = optimized_performance
        optimized_cfg._values["instance_id"] = params["instance_id"]
        writer.writerow(optimized_cfg._values)

    return optimized_cfg
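A hedged sketch of how dqn_smac_wrapper might be invoked; the params keys below mirror what the function reads, and all values are assumptions:

# Hypothetical invocation:
params = {
    "logdir": "./logs",
    "instance_id": 1,
    "seed": 42,
    "runcount_limit": 50,
    "run_parallel": "false",
    "architecture": "ff",  # anything but 'lstm'/'gru' takes the batch_size branch
    "batch_size": 30,      # popped again inside the wrapper
    # ...plus whatever run_dqn_smac expects
}
optimized_cfg = dqn_smac_wrapper(**params)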
Example #11
                                                  0.001,
                                                  1,
                                                  default_value=0.001,
                                                  log=True)
cs.add_hyperparameters([
    loss_scalar, loss_stddev, learning_rate, step_size_scalar, l2_regularizer,
    proximal_regularizer
])

# Scenario object
scenario = Scenario({
    "run_obj": "quality",  # we optimize quality
    # (alternatively runtime)
    "runcount-limit": 10,
    "cs": cs,  # configuration space
    "deterministic": "false",
    "shared_model": True,
    "input_psmac_dirs": "smac-output-learch",
    "cutoff_time": 9000,
    "wallclock_limit": 'inf'
})

# Example call of the function
# It returns: Status, Cost, Runtime, Additional Infos
def_value = learch(cs.get_default_configuration())
print("Default Value: %.2f" % def_value)

# Optimize, using a SMAC-object
print("Optimizing! Depending on your machine, this might take a few minutes.")
smac = SMAC4HPO(scenario=scenario,
                rng=np.random.RandomState(42),
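The SMAC4HPO call above is cut off. A hedged completion, assuming the learch callable used for the default evaluation is also the target function:

# Hypothetical continuation of the truncated call:
smac = SMAC4HPO(scenario=scenario,
                rng=np.random.RandomState(42),
                tae_runner=learch)

incumbent = smac.optimize()
inc_value = learch(incumbent)
print("Optimized Value: %.2f" % inc_value)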
Example #12
    def test_ta_integration_to_smbo(self):
        """
        In SMBO, 3 objects need to actively communicate:
            -> stats
            -> epm
            -> runhistory

        This method makes sure that executed jobs are properly registered
        in the above objects

        It uses n_workers to test parallel and serial implementations!!
        """

        for n_workers in range(1, 2):
            # We create a controlled setting, in which we optimize x^2
            # This will allow us to make sure every component acts as expected

            # FIRST: config space
            cs = ConfigurationSpace()
            cs.add_hyperparameter(UniformFloatHyperparameter('x', -10.0, 10.0))
            smac = SMAC4HPO(
                scenario=Scenario({
                    'n_workers': n_workers,
                    'cs': cs,
                    'runcount_limit': 5,
                    'run_obj': 'quality',
                    "deterministic": "true",
                    "initial_incumbent": "DEFAULT",
                    'output_dir': 'data-test_smbo'
                }),
                tae_runner=ExecuteTAFuncArray,
                tae_runner_kwargs={'ta': target},
            )

            # Register output dir for deletion
            self.output_dirs.append(smac.output_dir)

            smbo = smac.solver

            # SECOND: Intensifier that tracks configs
            all_configs = []

            def mock_get_next_run(**kwargs):
                config = cs.sample_configuration()
                all_configs.append(config)
                return (RunInfoIntent.RUN,
                        RunInfo(config=config,
                                instance=time.time() % 10,
                                instance_specific={},
                                seed=0,
                                cutoff=None,
                                capped=False,
                                budget=0.0))

            intensifier = unittest.mock.Mock()
            intensifier.num_run = 0
            intensifier.process_results.return_value = (0.0, 0.0)
            intensifier.get_next_run = mock_get_next_run
            smac.solver.intensifier = intensifier

            # THIRD: Run in this controlled setting
            smbo.run()

            # FOURTH: Checks

            # Make sure all configs were launched
            self.assertEqual(len(all_configs), 5)

            # Run history
            for k, v in smbo.runhistory.data.items():

                # All configuration should be successful
                self.assertEqual(v.status, StatusType.SUCCESS)

                # The value should be the square of the config's 'x'
                # The runhistory maps config -> config_id in config_ids;
                # k carries a config_id, so we recover the actual config
                # by inverse-searching the runhistory.config_ids dict
                config = list(smbo.runhistory.config_ids.keys())[list(
                    smbo.runhistory.config_ids.values()).index(k.config_id)]

                self.assertEqual(v.cost, config.get('x')**2)

            # No config is lost in the config history
            self.assertCountEqual(smbo.runhistory.config_ids.keys(),
                                  all_configs)

            # Stats!
            # We do not exceed the number of target algorithm runs
            self.assertEqual(smbo.stats.submitted_ta_runs, len(all_configs))
            self.assertEqual(smbo.stats.finished_ta_runs, len(all_configs))

            # No config is lost
            self.assertEqual(smbo.stats.n_configs, len(all_configs))

            # The EPM can access all points. This is something that
            # also relies on the runhistory
            X, Y, X_config = smbo.epm_chooser._collect_data_to_train_model()
            self.assertEqual(X.shape[0], len(all_configs))
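The test passes ta=target without defining it; given the assertions against config.get('x')**2, target presumably squares the single hyperparameter. A minimal sketch of that assumption (ExecuteTAFuncArray hands the configuration values to the callable as an array):

# Hypothetical target consistent with the x**2 checks above:
def target(x):
    return x[0] ** 2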
Example #13
                                          default_value=200)
learning_rate_init = UniformFloatHyperparameter('learning_rate_init',
                                                0.0001,
                                                1.0,
                                                default_value=0.001,
                                                log=True)
cs.add_hyperparameters(
    [n_layer, n_neurons, activation, batch_size, learning_rate_init])

# SMAC scenario object
scenario = Scenario({
    "run_obj": "quality",  # we optimize quality (alternative to runtime)
    "wallclock-limit":
    100,  # max duration to run the optimization (in seconds)
    "cs": cs,  # configuration space
    "deterministic": "true",
    "limit_resources": True,  # Uses pynisher to limit memory and runtime
    # Alternatively, you can also disable this.
    # Then you should handle runtime and memory yourself in the TA
    "cutoff": 30,  # runtime limit for target algorithm
    "memory_limit": 3072,  # adapt this to reasonable value for your hardware
})

# max budget for hyperband can be anything. Here, we set it to maximum no. of epochs to train the MLP for
max_iters = 50
# intensifier parameters
intensifier_kwargs = {'initial_budget': 5, 'max_budget': max_iters, 'eta': 3}
# To optimize, we pass the function to the SMAC-object
smac = HB4AC(scenario=scenario,
             rng=np.random.RandomState(42),
             tae_runner=mlp_from_cfg,
             intensifier_kwargs=intensifier_kwargs
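The HB4AC call above is cut off before the closing parenthesis. A hedged completion:

# Hypothetical continuation of the truncated call:
smac = HB4AC(scenario=scenario,
             rng=np.random.RandomState(42),
             tae_runner=mlp_from_cfg,
             intensifier_kwargs=intensifier_kwargs)

incumbent = smac.optimize()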
Example #14
                                                   default=1)
max_leaf_nodes = UniformIntegerHyperparameter("max_leaf_nodes",
                                              10,
                                              1000,
                                              default=100)

cs.add_hyperparameters([
    num_trees, min_weight_frac_leaf, criterion, max_features,
    min_samples_to_split, min_samples_in_leaf, max_leaf_nodes
])

# SMAC scenario object
scenario = Scenario({
    "run_obj": "quality",  # we optimize quality (alternative runtime)
    "runcount-limit": 50,  # maximum number of function evaluations
    "cs": cs,  # configuration space
    "deterministic": "true",
    "memory_limit": 3072,  # adapt this to reasonable value for your hardware
})

# To optimize, we pass the function to the SMAC-object
smac = SMAC(scenario=scenario,
            rng=np.random.RandomState(42),
            tae_runner=rf_from_cfg)

# Example call of the function with default values
# It returns: Status, Cost, Runtime, Additional Infos
def_value = smac.get_tae_runner().run(cs.get_default_configuration(), 1)[1]
print("Value for default configuration: %.2f" % (def_value))

# Start optimization
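The snippet stops at the optimization step. A hedged sketch of the presumable continuation:

# Hypothetical continuation:
incumbent = smac.optimize()
inc_value = smac.get_tae_runner().run(incumbent, 1)[1]
print("Value for optimized configuration: %.2f" % inc_value)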
Example #15
    val = 100. * (x2 - x1**2.)**2. + (1 - x1)**2.
    return val


if __name__ == "__main__":
    # Build Configuration Space which defines all parameters and their ranges
    cs = ConfigurationSpace()
    x0 = UniformFloatHyperparameter("x0", -5, 10, default_value=-3)
    x1 = UniformFloatHyperparameter("x1", -5, 10, default_value=-4)
    cs.add_hyperparameters([x0, x1])

    # Scenario object
    scenario = Scenario({
        "run_obj": "quality",  # we optimize quality (alternatively runtime)
        "runcount-limit": 10,  # max. number of function evaluations
        "cs": cs,  # configuration space
        "deterministic": True
    })

    # Use 'gp' or 'gp_mcmc' here
    model_type = 'gp'

    # Example call of the function
    # It returns: Status, Cost, Runtime, Additional Infos
    def_value = rosenbrock_2d(cs.get_default_configuration())
    print("Default Value: %.2f" % def_value)

    # Optimize, using a SMAC-object
    print(
        "Optimizing! Depending on your machine, this might take a few minutes."
    )
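Given the 'gp'/'gp_mcmc' comment, the snippet presumably continues with the SMAC4BO facade, whose model_type argument accepts exactly those values. A hedged sketch:

# Hypothetical continuation:
smac = SMAC4BO(scenario=scenario,
               model_type=model_type,
               rng=np.random.RandomState(42),
               tae_runner=rosenbrock_2d)

incumbent = smac.optimize()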
Example #16
    default_value='constant')
eta0 = UniformFloatHyperparameter("eta0",
                                  0.00001,
                                  1,
                                  default_value=0.1,
                                  log=True)
# Add the parameters to configuration space
cs.add_hyperparameters([alpha, l1_ratio, learning_rate, eta0])

# SMAC scenario object
scenario = Scenario({
    "run_obj": "quality",  # we optimize quality (alternative to runtime)
    "wallclock-limit":
    100,  # max duration to run the optimization (in seconds)
    "cs": cs,  # configuration space
    "deterministic": True,
    "limit_resources": True,  # Uses pynisher to limit memory and runtime
    "memory_limit": 3072,  # adapt this to reasonable value for your hardware
    "cutoff": 3,  # runtime limit for the target algorithm
    "instances": instances  # Optimize across all given instances
})

# intensifier parameters
# if no argument provided for budgets, hyperband decides them based on the number of instances available
intensifier_kwargs = {
    'initial_budget': 1,
    'max_budget': 45,
    'eta': 3,
    'instance_order': None,  # You can also shuffle the instance order with this parameter.
    # 'shuffle' will shuffle instances before each SH run and
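The comment above breaks off mid-sentence. These intensifier settings match SMAC's successive-halving-over-instances examples, so presumably they are passed to a facade together with the SuccessiveHalving intensifier; sgd_from_cfg is a placeholder target function:

# Hypothetical continuation:
from smac.facade.smac_hpo_facade import SMAC4HPO
from smac.intensification.successive_halving import SuccessiveHalving

smac = SMAC4HPO(scenario=scenario,
                rng=np.random.RandomState(42),
                tae_runner=sgd_from_cfg,
                intensifier=SuccessiveHalving,
                intensifier_kwargs=intensifier_kwargs)
incumbent = smac.optimize()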
Example #17
    def initialize(self, stamp, seed, acq_func, wallclock_limit, runcount_limit, test_function, random_leaf_size, min_x, min_y):
        # Check if caching is enabled
        caching = acq_func.startswith("pc")

        # Build runhistory
        # TODO Does this work correctly for non-caching?
        runhistory = PCRunHistory(average_cost)

        # Setup statistics
        info = {
            'stamp': stamp,
            'caching': caching,
            'acquisition_function': acq_func,
            'wallclock_limit': wallclock_limit
        }

        self.statistics = WhiteboxStatistics(stamp,
                                             self.output_dir,
                                             information=info,
                                             total_runtime=wallclock_limit)

        # Set up tae runner
        if caching:
            if test_function == "beale":
                tae = CachedBeale(runhistory=runhistory,
                                  statistics=self.statistics,
                                  min_x=min_x,
                                  min_y=min_y)
            else:
                tae = CachedParaboloid2Minima(runhistory=runhistory,
                                              statistics=self.statistics,
                                              min_x=min_x,
                                              min_y=min_y)
        else:
            if test_function == "beale":
                tae = Beale(runhistory=runhistory,
                            statistics=self.statistics,
                            min_x=min_x,
                            min_y=min_y)
            else:
                tae = Paraboloid2Minima(runhistory=runhistory,
                                        statistics=self.statistics,
                                        min_x=min_x,
                                        min_y=min_y)

        # setup config space
        self.config_space = tae.get_config_space(seed=seed)

        # Build scenario
        args = {'cs': self.config_space,
                'run_obj': "quality",
                'wallclock_limit': wallclock_limit,
                'runcount_limit': runcount_limit,
                'deterministic': "true"
                }
        scenario = Scenario(args)

        # Build stats
        stats = WhiteboxStats(scenario,
                              output_dir=self.output_dir + "/smac/",
                              stamp=stamp)
        # Give the stats to the tae runner to simulate timing
        tae.set_smac_stats(stats)

        # Choose acquisition function
        if acq_func in ["eips", "m-eips", "pc-m-eips", "pceips", "pc-m-pceips"]:
            model_target_names = ['cost', 'time']
        elif acq_func in ["ei", "m-ei", "pc-m-ei"]:
            model_target_names = ['cost']
        else:
            # Not a valid acquisition function
            raise ValueError("The provided acquisition function is not valid")

        # Setup trajectory file
        trajectory_path = self.output_dir + "/logging/" + stamp + "/" # + self.data_path.split("/")[-1] + "/" + str(stamp)
        if not os.path.exists(trajectory_path):
            os.makedirs(trajectory_path)
        self.trajectory_path_json = trajectory_path + "/traj_aclib2.json"
        self.trajectory_path_csv = trajectory_path + "/traj_old.csv"

        # Build SMBO object
        smbo_builder = SMBOBuilder()
        self.smbo = smbo_builder.build_pc_smbo(
            tae_runner=tae,
            stats=stats,
            scenario=scenario,
            runhistory=runhistory,
            aggregate_func=average_cost,
            acq_func_name=acq_func,
            model_target_names=model_target_names,
            logging_directory=trajectory_path,
            random_leaf_size=random_leaf_size,
            constant_pipeline_steps=["preprocessor"],
            variable_pipeline_steps=["classifier"])
Example #18
    def run(self, rng, n_iters=10, n_jobs=1):
        # Construct configuration space
        cs_combined = CS.ConfigurationSpace()

        model_choice = CSH.CategoricalHyperparameter(
            "model",
            choices=[
                model_factory.name for model_factory, _ in self.model_factories
            ])
        cs_combined.add_hyperparameter(model_choice)
        for model_factory, cs in self.model_factories:
            model_name = model_factory.name
            add_configuration_space(cs_combined,
                                    "_" + model_name,
                                    cs,
                                    parent_hyperparameter={
                                        "parent": model_choice,
                                        "value": model_name
                                    })

        smac_rng = np.random.RandomState(rng.integers(1 << 31))
        scenario = Scenario({
            "run_obj": "quality",
            "runcount-limit": n_iters,
            "cs": cs_combined,
            "deterministic": "true",
            "limit_resources": False
        })

        smac = SMAC4HPO(scenario=scenario,
                        rng=smac_rng,
                        tae_runner=self._evaluate,
                        n_jobs=n_jobs)

        incumbent = smac.optimize()

        ret_value = dict()
        inc_cost = float("inf")
        inc_costs = []
        evaluated_costs = []
        evaluated_cfgs = []
        inc_cfgs = []
        costs_and_config_ids = []
        inc_cfg = None
        for key, val in smac.runhistory.data.items():
            cfg = smac.runhistory.ids_config[key.config_id]
            if val.cost < inc_cost:
                inc_cost = val.cost
                inc_cfg = cfg
            inc_costs.append(inc_cost)
            evaluated_costs.append(val.cost)
            evaluated_cfgs.append(cfg)
            inc_cfgs.append(inc_cfg)

        tune_result = ModelTuneResult(inc_cfg=inc_cfg,
                                      cfgs=evaluated_cfgs,
                                      costs=evaluated_costs,
                                      inc_costs=inc_costs,
                                      inc_cfgs=inc_cfgs)

        model_factory, inc_cfg = self._get_model_cfg(incumbent)
        final_model = model_factory(inc_cfg, self.evaluator.trajs)

        return final_model, tune_result
Example #19
    def test_get_X_y(self):
        '''
            add some data to RH and check returned values in X,y format
        '''

        self.scen = Scenario({
            'cutoff_time': 20,
            'cs': self.cs,
            'run_obj': 'runtime',
            'instances': [['1'], ['2']],
            'features': {
                '1': [1, 1],
                '2': [2, 2]
            },
            'output_dir': ''
        })

        rh2epm = runhistory2epm.RunHistory2EPM4Cost(num_params=2,
                                                    scenario=self.scen)

        self.rh.add(config=self.config1,
                    cost=1,
                    time=10,
                    status=StatusType.SUCCESS,
                    instance_id='1',
                    seed=None,
                    additional_info=None)

        self.rh.add(config=self.config1,
                    cost=2,
                    time=10,
                    status=StatusType.SUCCESS,
                    instance_id='2',
                    seed=None,
                    additional_info=None)

        self.rh.add(config=self.config2,
                    cost=1,
                    time=10,
                    status=StatusType.TIMEOUT,
                    instance_id='1',
                    seed=None,
                    additional_info=None)

        self.rh.add(config=self.config2,
                    cost=0.1,
                    time=10,
                    status=StatusType.CAPPED,
                    instance_id='2',
                    seed=None,
                    additional_info=None)

        X, y, c = rh2epm.get_X_y(self.rh)

        print(X, y, c)

        X_sol = np.array([[0, 100, 1, 1], [0, 100, 2, 2], [100, 0, 1, 1],
                          [100, 0, 2, 2]])
        self.assertTrue(np.all(X == X_sol))

        y_sol = np.array([1, 2, 1, 0.1])
        self.assertTrue(np.all(y == y_sol))

        c_sol = np.array([False, False, True, True])
        self.assertTrue(np.all(c == c_sol))
Example #20
    def test_against_smac(self):
        # PHOTON implementation
        self.pipe.add(PipelineElement('StandardScaler'))
        # then do feature selection using a PCA, specify which values to try in the hyperparameter search
        self.pipe += PipelineElement(
            'PCA', hyperparameters={'n_components': IntegerRange(5, 30)})
        # engage and optimize the good old SVM for Classification
        self.pipe += PipelineElement(
            'SVC',
            hyperparameters={
                'kernel': Categorical(["linear", "rbf", 'poly', 'sigmoid']),
                'C': FloatRange(0.5, 200)
            },
            gamma='auto')

        self.X, self.y = self.simple_classification()
        self.pipe.fit(self.X, self.y)

        # AUTO ML direct
        # Build Configuration Space which defines all parameters and their ranges
        cs = ConfigurationSpace()

        # We define a few possible types of SVM-kernels and add them as "kernel" to our cs
        n_components = UniformIntegerHyperparameter("PCA__n_components", 5,
                                                    30)  # , default_value=5)
        cs.add_hyperparameter(n_components)

        kernel = CategoricalHyperparameter(
            "SVC__kernel",
            ["linear", "rbf", 'poly', 'sigmoid'])  #, default_value="linear")
        cs.add_hyperparameter(kernel)

        c = UniformFloatHyperparameter("SVC__C", 0.5, 200)  #, default_value=1)
        cs.add_hyperparameter(c)

        # Scenario object
        scenario = Scenario({
            "run_obj": "quality",  # we optimize quality (alternatively runtime)
            "runcount-limit": 800,  # maximum function evaluations
            "cs": cs,  # configuration space
            "deterministic": "true",
            "shared_model": "false",  # !!!!
            "wallclock_limit": self.time_limit
        })

        # Optimize, using a SMAC-object
        print(
            "Optimizing! Depending on your machine, this might take a few minutes."
        )
        smac = SMAC4BO(scenario=scenario,
                       rng=np.random.RandomState(42),
                       tae_runner=self.objective_function)

        self.traurig = smac

        incumbent = smac.optimize()

        inc_value = self.objective_function(incumbent)

        print(incumbent)
        print(inc_value)

        runhistory_photon = self.smac_helper["data"].solver.runhistory
        runhistory_original = smac.solver.runhistory

        x_ax = range(
            1,
            min(len(runhistory_original.cost_per_config.keys()),
                len(runhistory_photon.cost_per_config.keys())) + 1)
        y_ax_original = [
            runhistory_original.cost_per_config[tmp] for tmp in x_ax
        ]
        y_ax_photon = [runhistory_photon.cost_per_config[tmp] for tmp in x_ax]

        y_ax_original_inc = [min(y_ax_original[:tmp + 1]) for tmp in x_ax]
        y_ax_photon_inc = [min(y_ax_photon[:tmp + 1]) for tmp in x_ax]

        plt.figure(figsize=(10, 7))
        plt.plot(x_ax, y_ax_original, 'g', label='Original')
        plt.plot(x_ax, y_ax_photon, 'b', label='PHOTON')
        plt.plot(x_ax, y_ax_photon_inc, 'r', label='PHOTON Incumbent')
        plt.plot(x_ax, y_ax_original_inc, 'k', label='Original Incumbent')
        plt.title('Photon Prove')
        plt.xlabel('X')
        plt.ylabel('Y')
        plt.legend(loc='best')
        plt.show()

        def neighbours(items, fill=None):
            before = itertools.chain([fill], items)
            after = itertools.chain(
                items,
                [fill])  # You could use itertools.zip_longest() later instead.
            next(after)
            for a, b, c in zip(before, items, after):
                yield [value for value in (a, b, c) if value is not fill]

        print("---------------")
        original_pairing = [
            sum(values) / len(values) for values in neighbours(y_ax_original)
        ]
        bias_term = np.mean([
            abs(y_ax_original_inc[t] - y_ax_photon_inc[t])
            for t in range(len(y_ax_photon_inc))
        ])
        photon_pairing = [
            sum(values) / len(values) - bias_term
            for values in neighbours(y_ax_photon)
        ]
        counter = 0
        for i, x in enumerate(x_ax):
            if abs(original_pairing[i] - photon_pairing[i]) > 0.05:
                counter += 1
            self.assertLessEqual(counter / len(x_ax), 0.15)
Example #21
                                            default_value=2)

optimizer = CategoricalHyperparameter("optimizer",
                                      ['adam', 'sgd', 'nadam', 'RMSprop'],
                                      default_value='RMSprop')
optimizer_lr = CategoricalHyperparameter("optimizer_lr",
                                         [.0001, .0003, .001, .003, .01],
                                         default_value=.0003)
learning_decay_rate = UniformFloatHyperparameter("learning_decay_rate",
                                                 0,
                                                 0.9,
                                                 default_value=.6)

cs.add_hyperparameters([
    first_kernel_size, conv_filters, n_conv, dropout, activation, dense_width,
    dense_length, optimizer, optimizer_lr, learning_decay_rate
])

scenario = Scenario({
    "run_obj": "quality",
    "runcount-limit": 128,
    "cs": cs,
    "deterministic": "true"
})
scenario.output_dir_for_this_run = "C:\\NNwork\\SMAC3out"
scenario.output_dir = "C:\\NNwork\\SMAC3out"
smac = SMAC(scenario=scenario,
            rng=np.random.RandomState(23),
            tae_runner=cnn_from_cfg)

print_incumb(smac.optimize())
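print_incumb is not shown anywhere in the snippet; a minimal, hypothetical sketch of such a helper:

def print_incumb(cfg):
    # Print each hyperparameter of the incumbent configuration
    for name in cfg:
        print("%s: %s" % (name, cfg[name]))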
Example #22
        def test_photon_implementation_switch(self):
            # PHOTON implementation
            self.pipe.add(PipelineElement('StandardScaler'))
            self.pipe += PipelineElement(
                'PCA', hyperparameters={'n_components': IntegerRange(5, 30)})
            estimator_siwtch = Switch("Estimator")
            estimator_siwtch += PipelineElement('SVC',
                                                hyperparameters={
                                                    'kernel':
                                                    Categorical(
                                                        ["rbf", 'poly']),
                                                    'C':
                                                    FloatRange(0.5, 200)
                                                },
                                                gamma='auto')
            estimator_siwtch += PipelineElement('RandomForestClassifier',
                                                hyperparameters={
                                                    'criterion':
                                                    Categorical(
                                                        ['gini', 'entropy']),
                                                    'min_samples_split':
                                                    IntegerRange(2, 4)
                                                })
            self.pipe += estimator_siwtch
            self.X, self.y = self.simple_classification()
            self.pipe.fit(self.X, self.y)

            # direct AUTO ML implementation

            # Build Configuration Space which defines all parameters and their ranges
            cs = ConfigurationSpace()
            n_components = UniformIntegerHyperparameter(
                "PCA__n_components", 5, 30)
            cs.add_hyperparameter(n_components)

            switch = CategoricalHyperparameter("Estimator_switch",
                                               ['svc', 'rf'])
            cs.add_hyperparameter(switch)

            kernel = CategoricalHyperparameter("SVC__kernel", ["rbf", 'poly'])
            cs.add_hyperparameter(kernel)
            c = UniformFloatHyperparameter("SVC__C", 0.5, 200)
            cs.add_hyperparameter(c)
            use_svc_c = InCondition(child=kernel,
                                    parent=switch,
                                    values=["svc"])
            use_svc_kernel = InCondition(child=c,
                                         parent=switch,
                                         values=["svc"])

            criterion = CategoricalHyperparameter(
                "RandomForestClassifier__criterion", ['gini', 'entropy'])
            cs.add_hyperparameter(criterion)
            minsplit = UniformIntegerHyperparameter(
                "RandomForestClassifier__min_samples_split", 2, 4)
            cs.add_hyperparameter(minsplit)

            use_rf_crit = InCondition(child=criterion,
                                      parent=switch,
                                      values=["rf"])
            use_rf_minsplit = InCondition(child=minsplit,
                                          parent=switch,
                                          values=["rf"])

            cs.add_conditions(
                [use_svc_c, use_svc_kernel, use_rf_crit, use_rf_minsplit])

            # Scenario object
            scenario = Scenario({
                "run_obj": "quality",
                "cs": cs,
                "deterministic": "true",
                "wallclock_limit": self.time_limit,
                "limit_resources": False,
                'abort_on_first_run_crash': False
            })

            # Optimize, using a SMAC directly
            smac = SMAC4HPO(scenario=scenario,
                            rng=42,
                            tae_runner=self.objective_function_switch)
            _ = smac.optimize()

            runhistory_photon = self.smac_helper["data"].solver.runhistory
            runhistory_original = smac.solver.runhistory

            x_ax = range(
                1,
                min(len(runhistory_original._cost_per_config.keys()),
                    len(runhistory_photon._cost_per_config.keys())) + 1)
            y_ax_original = [
                runhistory_original._cost_per_config[tmp] for tmp in x_ax
            ]
            y_ax_photon = [
                runhistory_photon._cost_per_config[tmp] for tmp in x_ax
            ]

            min_len = min(len(y_ax_original), len(y_ax_photon))
            self.assertLessEqual(
                np.max(
                    np.abs(
                        np.array(y_ax_original[:min_len]) -
                        np.array(y_ax_photon[:min_len]))), 0.01)
Example #23
def get_parameters(train_data, kfold, iterations):
    def compute(config):

        num_leaves = int(config['num_leaves'])
        max_bin = int(config['max_bin'])
        min_data_in_leaf = int(config['min_data_in_leaf'])
        # num_trees = int(config['num_trees'])
        # bagging_fraction = config['bagging_fraction']
        # bagging_freq = int(config['bagging_freq'])
        feature_fraction = config['feature_fraction']
        # lambda_l1 = config['lambda_l1'],
        lambda_l2 = config['lambda_l2']
        min_gain_to_split = config['min_gain_to_split']
        learning_rate = config['learning_rate']

        parameters = {
            'boosting_type': 'gbdt',
            'objective': 'regression_l2',
            'learning_rate': learning_rate,
            'num_leaves': num_leaves,
            # 'max_depth': max_depth,
            'min_data_in_leaf': min_data_in_leaf,
            # 'num_trees': 10000,
            'max_bin': max_bin,
            # 'bagging_fraction': bagging_fraction,
            # 'bagging_freq': bagging_freq,
            'feature_fraction': feature_fraction,
            'verbose': -1,
            # 'lambda_l1': lambda_l1,
            'lambda_l2': lambda_l2,
            'min_gain_to_split': min_gain_to_split
        }

        eval_hist = lgb.cv(parameters,
                           train_data,
                           folds=KFold(kfold),
                           stratified=False,
                           shuffle=False,
                           verbose_eval=True,
                           early_stopping_rounds=10)

        loss = min(eval_hist['l2-mean'])

        # loss = parameters['num_leaves'] * parameters['num_leaves']
        return loss

    logging.basicConfig(level=logging.DEBUG)  # logging.DEBUG for debug output

    # Build Configuration Space which defines all parameters and their ranges

    def get_configspace():
        config_space = CS.ConfigurationSpace()
        # config_space.add_hyperparameter(CS.UniformIntegerHyperparameter('max_depth', lower=3, upper=5))
        config_space.add_hyperparameter(
            CS.UniformIntegerHyperparameter('num_leaves', lower=10, upper=35))
        # config_space.add_hyperparameter(CS.UniformIntegerHyperparameter('num_trees', lower=100, upper=500))
        config_space.add_hyperparameter(
            CS.UniformIntegerHyperparameter('min_data_in_leaf',
                                            lower=1,
                                            upper=12))
        config_space.add_hyperparameter(
            CS.UniformIntegerHyperparameter('max_bin', lower=20, upper=255))
        # config_space.add_hyperparameter(CS.UniformFloatHyperparameter('bagging_fraction', lower=0.1, upper=1))
        # config_space.add_hyperparameter(CS.UniformIntegerHyperparameter('bagging_freq', lower=0, upper=500))
        config_space.add_hyperparameter(
            CS.UniformFloatHyperparameter('feature_fraction',
                                          lower=0.01,
                                          upper=1.0))
        # config_space.add_hyperparameter(CS.UniformFloatHyperparameter('lambda_l1', lower=0, upper=1))
        config_space.add_hyperparameter(
            CS.UniformIntegerHyperparameter('lambda_l2', lower=0, upper=70))
        config_space.add_hyperparameter(
            CS.UniformFloatHyperparameter('min_gain_to_split',
                                          lower=0.0,
                                          upper=1.0))
        config_space.add_hyperparameter(
            CS.UniformFloatHyperparameter('learning_rate',
                                          lower=0.005,
                                          upper=0.5))

        return config_space

    cs = get_configspace()
    # Scenario object
    scenario = Scenario({
        "run_obj": "quality",  # we optimize quality (alternatively runtime)
        "runcount-limit":
        iterations,  # max. number of function evaluations; for this example set to a low number
        "cs": cs,  # configuration space
        "deterministic": "false"
    })

    # Example call of the function
    # It returns: Status, Cost, Runtime, Additional Infos
    def_value = compute(cs.get_default_configuration())
    print("Default Value: %.2f" % def_value)

    # Optimize, using a SMAC-object
    print(
        "Optimizing! Depending on your machine, this might take a few minutes."
    )
    smac = SMAC4HPO(
        scenario=scenario,
        rng=np.random.RandomState(42),
        tae_runner=compute,
    )

    incumbent = smac.optimize()
    print("haole")

    inc_value = compute(incumbent)

    return inc_value
Example #24
        def test_photon_implementation_simple(self):
            # PHOTON implementation
            self.pipe.add(PipelineElement('StandardScaler'))
            self.pipe += PipelineElement(
                'PCA', hyperparameters={'n_components': IntegerRange(5, 30)})
            self.pipe += PipelineElement('SVC',
                                         hyperparameters={
                                             'kernel':
                                             Categorical(["rbf", 'poly']),
                                             'C': FloatRange(0.5, 200)
                                         },
                                         gamma='auto')
            self.X, self.y = self.simple_classification()
            self.pipe.fit(self.X, self.y)

            # direct AUTO ML implementation
            # Build Configuration Space which defines all parameters and their ranges
            cs = ConfigurationSpace()
            n_components = UniformIntegerHyperparameter(
                "PCA__n_components", 5, 30)
            cs.add_hyperparameter(n_components)
            kernel = CategoricalHyperparameter("SVC__kernel", ["rbf", 'poly'])
            cs.add_hyperparameter(kernel)
            c = UniformFloatHyperparameter("SVC__C", 0.5, 200)
            cs.add_hyperparameter(c)

            # Scenario object
            scenario = Scenario({
                "run_obj": "quality",
                "cs": cs,
                "deterministic": "true",
                "wallclock_limit": self.time_limit,
                "limit_resources": False,
                'abort_on_first_run_crash': False
            })

            # Optimize, using a SMAC directly
            smac = SMAC4HPO(scenario=scenario,
                            rng=42,
                            tae_runner=self.objective_function_simple)
            _ = smac.optimize()

            runhistory_photon = self.smac_helper["data"].solver.runhistory
            runhistory_original = smac.solver.runhistory

            x_ax = range(
                1,
                min(len(runhistory_original._cost_per_config.keys()),
                    len(runhistory_photon._cost_per_config.keys())) + 1)
            y_ax_original = [
                runhistory_original._cost_per_config[tmp] for tmp in x_ax
            ]
            y_ax_photon = [
                runhistory_photon._cost_per_config[tmp] for tmp in x_ax
            ]

            y_ax_original_inc = [min(y_ax_original[:tmp + 1]) for tmp in x_ax]
            y_ax_photon_inc = [min(y_ax_photon[:tmp + 1]) for tmp in x_ax]

            plot = False
            if plot:
                plt.figure(figsize=(10, 7))
                plt.plot(x_ax, y_ax_original, 'g', label='Original')
                plt.plot(x_ax, y_ax_photon, 'b', label='PHOTON')
                plt.plot(x_ax, y_ax_photon_inc, 'r', label='PHOTON Incumbent')
                plt.plot(x_ax,
                         y_ax_original_inc,
                         'k',
                         label='Original Incumbent')
                plt.title('PHOTON vs. original SMAC')
                plt.xlabel('Configuration ID')
                plt.ylabel('Cost')
                plt.legend(loc='best')
                plt.savefig("smac.png")

            min_len = min(len(y_ax_original), len(y_ax_photon))
            self.assertLessEqual(
                np.max(
                    np.abs(
                        np.array(y_ax_original[:min_len]) -
                        np.array(y_ax_photon[:min_len]))), 0.01)
Exemplo n.º 25
0
l1_ratio = UniformFloatHyperparameter("l1_ratio", 0.0, 1.0, default_value=0.15)

cs.add_hyperparameters([
    penalty, dual, tol, C, fit_intercept, intercept_scaling, solver, max_iter,
    multi_class, warm_start, l1_ratio
])

# some hyperparameters depend on others

# This work is done by 'if' conditions in the objective function;
# a ConfigSpace-based alternative is sketched below
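
# A hedged alternative sketch: ConfigSpace can encode such dependencies
# directly, so that a child hyperparameter is only active when its parent
# takes a matching value. This assumes the 'penalty' and 'l1_ratio'
# hyperparameters defined above and that l1_ratio only applies to the
# elastic-net penalty; adapt the parent value to your objective function.
#
# from ConfigSpace.conditions import EqualsCondition
# cs.add_condition(EqualsCondition(l1_ratio, penalty, "elasticnet"))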

# Scenario object
scenario = Scenario({
    "run_obj": "quality",  # we optimize quality (alternatively runtime)
    "runcount-limit":
    500,  # max. number of function evaluations; for this example set to a low number
    "cs": cs,  # configuration space
    "deterministic": "true"
})

# Example call of the function with the default configuration
# (called directly, the function simply returns the cost value)
def_value = LR_from_cfg(cs.get_default_configuration())
print("Default Value: %.2f" % (def_value))

# Optimize, using a SMAC-object
print("Optimizing! Depending on your machine, this might take a few minutes.")
smac = SMAC4HPO(scenario=scenario,
                rng=np.random.RandomState(42),
                tae_runner=LR_from_cfg)

incumbent = smac.optimize()
Exemplo n.º 26
0
def fmin_smac(
        func: typing.Callable,
        x0: typing.List[float],
        bounds: typing.List[typing.Iterable[float]],
        maxfun: int = -1,
        rng: typing.Optional[typing.Union[np.random.RandomState, int]] = None,
        scenario_args: typing.Optional[typing.Mapping[str, typing.Any]] = None,
        tae_runner_kwargs: typing.Optional[typing.Dict[str,
                                                       typing.Any]] = None,
        **kwargs: typing.Any) -> typing.Tuple[Configuration, float, SMAC4HPO]:
    """
    Minimize a function func using the SMAC4HPO facade
    (i.e., a modified version of SMAC).
    This function is a convenience wrapper for the SMAC4HPO class.

    Parameters
    ----------
    func : typing.Callable
        Function to minimize.
    x0 : typing.List[float]
        Initial guess/default configuration.
    bounds : typing.List[typing.Iterable[float]]
        ``(min, max)`` pairs for each element in ``x``, defining the bounds on
        that parameter.
    maxfun : int, optional
        Maximum number of function evaluations.
    rng : np.random.RandomState, optional
        Random number generator used by SMAC.
    scenario_args : typing.Mapping[str, typing.Any], optional
        Arguments passed to the scenario.
        See smac.scenario.scenario.Scenario.
    tae_runner_kwargs : typing.Dict[str, typing.Any], optional
        Arguments passed to the target algorithm runner.
    **kwargs :
        Arguments passed to the optimizer class.
        See ~smac.facade.smac_facade.SMAC.

    Returns
    -------
    x : list
        Estimated position of the minimum.
    f : float
        Value of `func` at the minimum.
    s : :class:`smac.facade.smac_hpo_facade.SMAC4HPO`
        SMAC object, which enables the user to get,
        e.g., the trajectory and runhistory.

    """
    # create configuration space
    cs = ConfigurationSpace()

    # Adjust zero padding
    tmplt = 'x{0:0' + str(len(str(len(bounds)))) + 'd}'
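    # e.g., with 12 bounds, len(str(12)) == 2 gives the template 'x{0:02d}',
    # so parameters are named 'x01', 'x02', ..., 'x12' (stable sort order)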

    for idx, (lower_bound, upper_bound) in enumerate(bounds):
        parameter = UniformFloatHyperparameter(name=tmplt.format(idx + 1),
                                               lower=lower_bound,
                                               upper=upper_bound,
                                               default_value=x0[idx])
        cs.add_hyperparameter(parameter)

    # create scenario
    scenario_dict = {
        "run_obj": "quality",
        "cs": cs,
        "deterministic": "true",
        "initial_incumbent": "DEFAULT",
    }

    if scenario_args is not None:
        scenario_dict.update(scenario_args)

    if maxfun > 0:
        scenario_dict["runcount_limit"] = maxfun
    scenario = Scenario(scenario_dict)

    # Handle optional TAE arguments
    if tae_runner_kwargs is not None:
        if 'ta' not in tae_runner_kwargs:
            tae_runner_kwargs.update({'ta': func})
    else:
        tae_runner_kwargs = {'ta': func}

    smac = SMAC4HPO(scenario=scenario,
                    tae_runner=ExecuteTAFuncArray,
                    tae_runner_kwargs=tae_runner_kwargs,
                    rng=rng,
                    **kwargs)

    smac.logger = logging.getLogger(smac.__module__ + "." +
                                    smac.__class__.__name__)
    incumbent = smac.optimize()
    config_id = smac.solver.runhistory.config_ids[incumbent]
    run_key = RunKey(config_id, None, 0)
    incumbent_performance = smac.solver.runhistory.data[run_key]
    incumbent = np.array(
        [incumbent[tmplt.format(idx + 1)] for idx in range(len(bounds))],
        dtype=float)
    return incumbent, incumbent_performance.cost, smac
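

# A minimal usage sketch (hedged): 'rosenbrock_2d' below is an illustrative
# objective, not part of this module; any callable that maps a configuration
# vector to a cost works.
def rosenbrock_2d(x):
    # classic 2-d Rosenbrock function; global minimum 0 at (1, 1)
    return 100. * (x[1] - x[0] ** 2.) ** 2. + (1. - x[0]) ** 2.

x, cost, _ = fmin_smac(func=rosenbrock_2d,
                       x0=[-3, -4],                  # default configuration
                       bounds=[(-5, 10), (-5, 10)],  # one (min, max) per dim
                       maxfun=10,                    # at most 10 evaluations
                       rng=3)                        # seed for reproducibility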
Exemplo n.º 27
0
 def test_no_output_dir(self):
     self.test_scenario_dict['output_dir'] = ""
     scenario = Scenario(self.test_scenario_dict)
     self.assertFalse(scenario.out_writer.write_scenario_file(scenario))
Exemplo n.º 28
0
import numpy as np

from ConfigSpace import ConfigurationSpace
from ConfigSpace.hyperparameters import (CategoricalHyperparameter,
                                         UniformFloatHyperparameter)

from smac.tae.execute_func import ExecuteTAFuncDict
from smac.scenario.scenario import Scenario
from smac.facade.smac_facade import SMAC

cs = ConfigurationSpace()

lr = UniformFloatHyperparameter("lr", 0.0001, 0.1, default_value=0.001)
cs.add_hyperparameter(lr)

batch_size = CategoricalHyperparameter("batch_size", [128, 256], default_value=128)
cs.add_hyperparameter(batch_size)

# Scenario object
scenario = Scenario({"run_obj": "quality",   # we optimize quality (alternatively runtime)
                     "runcount-limit": 5,  # maximum function evaluations
                     "cs": cs,               # configuration space
                     "deterministic": "true",
                     #"abort_on_first_run_crash": "false"
                     })

# Optimize, using a SMAC-object
print("Optimizing! Depending on your machine, this might take a few minutes.")
smac = SMAC(scenario=scenario, rng=np.random.RandomState(42),
        tae_runner=prepare_cnn)

print("searching for incumbent config!")

# Run the target function directly, without pynisher's resource limiting
smac.solver.intensifier.tae_runner.use_pynisher = False

incumbent = smac.optimize()

inc_value = prepare_cnn(incumbent)
Exemplo n.º 29
0
 def test_Exception(self):
     with self.assertRaises(TypeError):
         s = Scenario(['a', 'b'])
Exemplo n.º 30
0
cs.add_hyperparameter(min_samples_in_leaf)

max_depth = UniformIntegerHyperparameter("max_depth", 20, 100, default_value=20)
cs.add_hyperparameter(max_depth)

max_num_nodes = UniformIntegerHyperparameter("max_num_nodes",
                                             100,
                                             100000,
                                             default_value=1000)
cs.add_hyperparameter(max_num_nodes)

# SMAC scenario object
scenario = Scenario({
    "run_obj": "quality",  # we optimize quality (alternatively runtime)
    "runcount-limit": 400,  # at most 400 function evaluations
    "cs": cs,  # configuration space
    "deterministic": "true",
    "memory_limit": 1024,
})

# Optimize
smac = SMAC(scenario=scenario, rng=np.random.RandomState(42), tae_runner=rfr)

# example call of the function
# run() returns: Status, Cost, Runtime, Additional Infos; [1] selects the cost
def_value = smac.solver.intensifier.tae_runner.run(
    cs.get_default_configuration(), 1)[1]
print("Default Value: %.2f" % (def_value))

try:
    incumbent = smac.optimize()