def get_configspace(seed=None):
    """Build the search space for a small two-conv-layer CNN.

    Args:
        seed: optional seed forwarded to ``CS.ConfigurationSpace``.

    Returns:
        CS.ConfigurationSpace: space covering both conv layers, the FC
        width, dropout, batch size and learning rate.
    """
    cs = CS.ConfigurationSpace(seed)

    # Both conv layers share the same parameterization:
    # kernel size, output channels and stride.
    for layer in (1, 2):
        cs.add_hyperparameters([
            CSH.OrdinalHyperparameter(f"kernel_{layer}", sequence=[3, 5, 7],
                                      default_value=5),
            CSH.UniformIntegerHyperparameter(f"channels_{layer}", lower=3,
                                             upper=64, default_value=32),
            CSH.UniformIntegerHyperparameter(f"stride_{layer}", lower=1,
                                             upper=2, default_value=1),
        ])

    # Fully-connected layer width, searched on a log scale.
    cs.add_hyperparameter(
        CSH.UniformIntegerHyperparameter("hidden", lower=32, upper=256,
                                         log=True, default_value=128))

    # Regularization.
    cs.add_hyperparameter(
        CSH.UniformFloatHyperparameter("dropout", lower=0, upper=0.5,
                                       default_value=0.1))

    # Training hyperparameters.
    cs.add_hyperparameters([
        CSH.OrdinalHyperparameter("batch_size",
                                  sequence=[2, 4, 8, 16, 32, 64],
                                  default_value=4),
        CSH.UniformFloatHyperparameter("lr", lower=1e-6, upper=0.1,
                                       log=True, default_value=1e-3),
    ])
    return cs
def setUp(self) -> None:
    """Create a config space covering every hyperparameter kind the tests use."""
    lb, ub = 1, 100
    self.config_space = CS.ConfigurationSpace()
    hyperparameters = [
        CSH.UniformFloatHyperparameter('f', lower=lb, upper=ub),
        CSH.UniformFloatHyperparameter('fq', lower=lb, upper=ub, q=0.5),
        CSH.UniformFloatHyperparameter('fql', lower=lb, upper=ub, q=0.5,
                                       log=True),
        CSH.UniformFloatHyperparameter('fl', lower=lb, upper=ub, log=True),
        CSH.UniformIntegerHyperparameter('i', lower=lb, upper=ub),
        CSH.UniformIntegerHyperparameter('il', lower=lb, upper=ub, log=True),
        CSH.CategoricalHyperparameter('c', choices=['x', 'y', 'z']),
        # Ordinals carry their numeric range in `meta` so downstream code
        # can treat them like numerical parameters.
        CSH.OrdinalHyperparameter('o', sequence=list(range(1, 101)),
                                  meta={'lower': 1, 'upper': 100,
                                        'log': False}),
        CSH.OrdinalHyperparameter('ol', sequence=[1, 10, 100],
                                  meta={'lower': 1, 'upper': 100,
                                        'log': True}),
    ]
    for hp in hyperparameters:
        self.config_space.add_hyperparameter(hp)

    self.hp_names = self.config_space.get_hyperparameter_names()
    # Per-name class lookups used by the tests.
    self.is_categoricals = {
        name: type(self.config_space.get_hyperparameter(name)).__name__
        == 'CategoricalHyperparameter'
        for name in self.hp_names
    }
    self.is_ordinals = {
        name: type(self.config_space.get_hyperparameter(name)).__name__
        == 'OrdinalHyperparameter'
        for name in self.hp_names
    }
def get_configspace():
    """
    Build the configuration space with the needed hyperparameters.

    Beside float-hyperparameters on a log scale, it also handles
    categorical input parameters.

    :return: ConfigurationsSpace-Object
    """
    cs = CS.ConfigurationSpace()

    learning_rate = CSH.UniformFloatHyperparameter(
        'optimizer:lr', lower=0.001, upper=0.01, log=True)
    history = CSH.UniformIntegerHyperparameter(
        'model:temp_reg_params.history', lower=4, upper=12, default_value=12)
    n_layers = CSH.UniformIntegerHyperparameter(
        'model:temp_reg_params.n_layers', lower=2, upper=8, default_value=3)
    n_channels = CSH.OrdinalHyperparameter(
        'model:temp_reg_params.n_channels', sequence=[2, 4, 8, 16, 32],
        default_value=8)
    kernel_size = CSH.OrdinalHyperparameter(
        'model:temp_reg_params.kernel_size', sequence=[1, 3, 5],
        default_value=1)
    activation = CSH.CategoricalHyperparameter(
        'model:temp_reg_params.activation',
        choices=['ReLU', 'ELU', 'LeakyReLU', 'SELU'])
    # Bias parameterizations; NOTE(review): "" presumably disables the
    # weekday/month bias entirely — confirm with the model code.
    location_bias = CSH.CategoricalHyperparameter(
        'model:biases_type.location', choices=['LxT', 'L+T'])
    weekday_bias = CSH.CategoricalHyperparameter(
        'model:biases_type.weekday', choices=["", "W", "WxT"])
    month_bias = CSH.CategoricalHyperparameter(
        'model:biases_type.month', choices=["", "M", "MxT"])

    cs.add_hyperparameters([
        learning_rate, history, n_layers, n_channels, kernel_size,
        activation, location_bias, weekday_bias, month_bias,
    ])
    return cs
def test_add_good_dim(self):
    """Every supported shortcut maps to the matching ConfigSpace hyperparameter."""
    from deephyper.problem import HpProblem

    pb = HpProblem()

    # Integer range, uniform prior.
    expected = csh.UniformIntegerHyperparameter(
        name="p0", lower=-10, upper=10, log=False)
    assert pb.add_hyperparameter((-10, 10), "p0") == expected

    # Integer range, log-uniform prior.
    expected = csh.UniformIntegerHyperparameter(
        name="p1", lower=1, upper=100, log=True)
    assert pb.add_hyperparameter((1, 100, "log-uniform"), "p1") == expected

    # Float range, uniform prior.
    expected = csh.UniformFloatHyperparameter(
        name="p2", lower=-10.0, upper=10.0, log=False)
    assert pb.add_hyperparameter((-10.0, 10.0), "p2") == expected

    # Float range, log-uniform prior.
    expected = csh.UniformFloatHyperparameter(
        name="p3", lower=1.0, upper=100.0, log=True)
    assert pb.add_hyperparameter((1.0, 100.0, "log-uniform"), "p3") == expected

    # Numeric lists become ordinals.
    expected = csh.OrdinalHyperparameter(name="p4", sequence=[1, 2, 3, 4])
    assert pb.add_hyperparameter([1, 2, 3, 4], "p4") == expected

    expected = csh.OrdinalHyperparameter(name="p5",
                                         sequence=[1.0, 2.0, 3.0, 4.0])
    assert pb.add_hyperparameter([1.0, 2.0, 3.0, 4.0], "p5") == expected

    # String lists become categoricals.
    expected = csh.CategoricalHyperparameter(name="p6",
                                             choices=["cat0", "cat1"])
    assert pb.add_hyperparameter(["cat0", "cat1"], "p6") == expected

    # Dicts describe normal distributions.
    expected = csh.NormalIntegerHyperparameter(name="p7", mu=0, sigma=1)
    assert pb.add_hyperparameter({"mu": 0, "sigma": 1}, "p7") == expected

    # Truncated normals need ConfigSpace > 0.4.20.
    if cs.__version__ > "0.4.20":
        expected = csh.NormalIntegerHyperparameter(
            name="p8", mu=0, sigma=1, lower=-5, upper=5)
        assert pb.add_hyperparameter(
            {"mu": 0, "sigma": 1, "lower": -5, "upper": 5}, "p8") == expected

    expected = csh.NormalFloatHyperparameter(name="p9", mu=0, sigma=1)
    assert pb.add_hyperparameter({"mu": 0.0, "sigma": 1.0}, "p9") == expected
def check_hyperparameter(parameter, name=None, default_value=None):
    """Convert a shortcut definition into a ConfigSpace hyperparameter.

    Args:
        parameter: an existing ``csh.Hyperparameter`` (returned unchanged),
            a tuple ``(low, high[, prior])`` for a numeric range, or a list
            for a categorical/ordinal dimension.
        name (str): the name of the hyperparameter (required for shortcuts).
        default_value: optional default value forwarded to ConfigSpace.

    Returns:
        csh.Hyperparameter: the corresponding ConfigSpace hyperparameter.

    Raises:
        ValueError: when the shortcut definition is malformed.
        dh_exceptions.problem.SpaceDimNameOfWrongType: when ``name`` is not
            a string.
    """
    if isinstance(parameter, csh.Hyperparameter):
        return parameter

    if not isinstance(parameter, (list, tuple, np.ndarray)):
        raise ValueError(
            "Shortcut definition of an hyper-parameter has to be a list, tuple, array."
        )

    if not (type(name) is str):
        raise dh_exceptions.problem.SpaceDimNameOfWrongType(name)

    kwargs = {}
    if default_value is not None:
        kwargs["default_value"] = default_value

    if type(parameter) is tuple:  # Range of reals or integers
        # BUG FIX: `prior` used to be left unbound (NameError) when the
        # tuple had fewer than 2 or more than 3 elements; reject explicitly.
        if len(parameter) == 2:
            prior = "uniform"
        elif len(parameter) == 3:
            prior = parameter[2]
            assert prior in [
                "uniform",
                "log-uniform",
            ], f"Prior has to be 'uniform' or 'log-uniform' when {prior} was given for parameter '{name}'"
            parameter = parameter[:2]
        else:
            raise ValueError(
                f"Invalid dimension {name}: {parameter}. A tuple description"
                f" must have 2 or 3 elements.")
        log = prior == "log-uniform"

        if all([isinstance(p, int) for p in parameter]):
            return csh.UniformIntegerHyperparameter(name=name,
                                                    lower=parameter[0],
                                                    upper=parameter[1],
                                                    log=log,
                                                    **kwargs)
        elif any([isinstance(p, float) for p in parameter]):
            return csh.UniformFloatHyperparameter(name=name,
                                                  lower=parameter[0],
                                                  upper=parameter[1],
                                                  log=log,
                                                  **kwargs)
    elif type(parameter) is list:  # Categorical
        if any([
                isinstance(p, (str, bool)) or isinstance(p, np.bool_)
                for p in parameter
        ]):
            return csh.CategoricalHyperparameter(name,
                                                 choices=parameter,
                                                 **kwargs)
        elif all([isinstance(p, (int, float)) for p in parameter]):
            # BUG FIX: forward **kwargs so a caller-supplied `default_value`
            # is honored for ordinals too (matches the categorical branch
            # and the documented sibling implementation of this function).
            return csh.OrdinalHyperparameter(name, sequence=parameter,
                                             **kwargs)
    raise ValueError(
        f"Invalid dimension {name}: {parameter}. Read the documentation for"
        f" supported types.")
def get_configspace(default_config, test_mode, leaf, lr, tree):
    """
    Build the configuration space with the needed hyperparameters.

    Args:
        default_config: when truthy, pin every parameter to the defaults
            (10 leaves, learning rate 0.1, 1000 trees).
        test_mode: when truthy, pin every parameter to the supplied
            ``leaf``/``lr``/``tree`` values (takes precedence over
            ``default_config``).
        leaf, lr, tree: single values used in test mode.

    :return: ConfigurationsSpace-Object
    """
    cs = CS.ConfigurationSpace()

    if test_mode:
        # Single-point space built from the caller-supplied values.
        n_leaves = CSH.OrdinalHyperparameter('n_leaves', sequence=[leaf])
        learning_rate = CSH.OrdinalHyperparameter('learning_rate',
                                                  sequence=[lr])
        n_trees = CSH.OrdinalHyperparameter('n_trees', sequence=[tree])
    elif default_config:
        # Single-point space at the default configuration.
        n_leaves = CSH.OrdinalHyperparameter('n_leaves', sequence=[10])
        learning_rate = CSH.OrdinalHyperparameter('learning_rate',
                                                  sequence=[0.1])
        n_trees = CSH.OrdinalHyperparameter('n_trees', sequence=[1000])
    else:
        # Full quantized search space ("the good one").
        n_leaves = CSH.UniformIntegerHyperparameter(
            'n_leaves', lower=5, upper=100, default_value=10, q=5, log=False)
        learning_rate = CSH.UniformFloatHyperparameter(
            'learning_rate', lower=0.01, upper=0.5, default_value=0.1,
            q=0.01, log=False)
        n_trees = CSH.UniformIntegerHyperparameter(
            'n_trees', lower=100, upper=2000, default_value=1000, q=50,
            log=False)

    cs.add_hyperparameters([n_leaves, learning_rate, n_trees])
    return cs
def get_configspace():
    """
    Build the configuration space with the needed hyperparameters.

    Beside float-hyperparameters on a log scale, it also handles
    categorical input parameters.

    :return: ConfigurationsSpace-Object
    """
    cs = CS.ConfigurationSpace()

    lr = CSH.UniformFloatHyperparameter(
        'optimizer:lr', lower=0.001, upper=0.1, default_value=0.04, log=True)
    history = CSH.UniformIntegerHyperparameter(
        'model:history', lower=1, upper=12, default_value=12)
    n_layers = CSH.UniformIntegerHyperparameter(
        'model:n_layers', lower=2, upper=8, default_value=3)
    n_channels = CSH.OrdinalHyperparameter(
        'model:n_channels', sequence=[2, 4, 8, 16, 32, 64], default_value=8)

    cs.add_hyperparameters([lr, history, n_layers, n_channels])
    return cs
def setUp(self) -> None:
    """Build two 10-D spaces: all-continuous, and one with a categorical
    choice plus an ordinal last dimension."""
    dim = 10
    self.cs = CS.ConfigurationSpace()
    self.cs_cat = CS.ConfigurationSpace()

    # The categorical space additionally selects the target function.
    self.cs_cat.add_hyperparameter(
        CSH.CategoricalHyperparameter('func', choices=['sine', 'cosine']))

    for d in range(dim):
        self.cs.add_hyperparameter(
            CSH.UniformFloatHyperparameter(f'x{d}', lower=-5, upper=5))
        if d == dim - 1:
            # Last dimension of the categorical space is an ordinal
            # covering the same integer range, with the bounds in `meta`.
            self.cs_cat.add_hyperparameter(
                CSH.OrdinalHyperparameter(f'x{d}',
                                          sequence=list(range(-5, 6)),
                                          meta={'lower': -5, 'upper': 5}))
        else:
            self.cs_cat.add_hyperparameter(
                CSH.UniformFloatHyperparameter(f'x{d}', lower=-5, upper=5))

    self.hp_names = list(self.cs._hyperparameters.keys())
    self.hp_names_cat = list(self.cs_cat._hyperparameters.keys())
    self.logger = get_logger(file_name='test', logger_name='test')
], default_value="it2,jt2,kt2")
# Candidate orderings (comma-separated tokens) for the third loop nest.
p2 = CSH.CategoricalHyperparameter(name='p2', choices=[
    "it3,jt3,kt3", "jt3,kt3,it3", "kt3,it3,jt3", "kt3,jt3,it3",
    "it3,kt3,jt3", "jt3,it3,kt3"
], default_value="it3,jt3,kt3")
# Shared size menu for p3..p11, kept as strings.
# NOTE(review): presumably consumed textually by a code generator — confirm.
seq = [
    '4', '8', '16', '20', '32', '50', '64', '80', '96', '100', '128', '192',
    '256', '384', '512', '768', '1024', '1536', '2048'
]
p3 = CSH.OrdinalHyperparameter(name='p3', sequence=seq, default_value='96')
p4 = CSH.OrdinalHyperparameter(name='p4', sequence=seq, default_value='96')
p5 = CSH.OrdinalHyperparameter(name='p5', sequence=seq, default_value='96')
p6 = CSH.OrdinalHyperparameter(name='p6', sequence=seq, default_value='96')
p7 = CSH.OrdinalHyperparameter(name='p7', sequence=seq, default_value='96')
p8 = CSH.OrdinalHyperparameter(name='p8', sequence=seq, default_value='96')
p9 = CSH.OrdinalHyperparameter(name='p9', sequence=seq, default_value='96')
p10 = CSH.OrdinalHyperparameter(name='p10', sequence=seq, default_value='96')
p11 = CSH.OrdinalHyperparameter(name='p11', sequence=seq, default_value='96')
cs.add_hyperparameters([p0, p1, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11])
# problem space
task_space = None
def get_configspace():
    """
    Build the configuration space with the needed hyperparameters.

    It is easily possible to implement different types of hyperparameters.
    Beside float-hyperparameters on a log scale, it is also able to handle
    categorical input parameter.

    :return: ConfigurationsSpace-Object

    NOTE(review): the ray-tune style dict below appears to document an
    earlier, wider version of this search space; several ranges in the
    actual code are narrower (dropout 0.01-0.1 vs 0-0.2, filter sizes
    [32, 64, 128] vs up to 512, etc.). Confirm which is authoritative.

    hp_config = {
        'dropout_rate': tune.quniform(0.01, 0.2, 0.02),  # truncated normal from 0 to 0.2, with 0.01 increment
        'tfbs_kernel_size': tune.choice([32, 64, 128, 256, 512]),
        'tfbs_kernel_length': tune.choice([10, 25, 40]),
        'dilation_level': tune.choice([6, 7, 8, 9, 10, ]),
        'dilation_kernel_length': tune.randint(2, 6),
        'compression_level': tune.choice([4, 5, 6, 7]),
        'compression_kernel_size': tune.choice([64, 128, 256]),
        'learning_rate': tune.qloguniform(0.0001, 0.1, 0.0001),
    }
    """
    cs = CS.ConfigurationSpace()
    # Dropout quantized to 0.01 steps.
    dropout_rate = CSH.UniformFloatHyperparameter('dropout_rate',
                                                  lower=0.01,
                                                  upper=0.1,
                                                  default_value=0.05,
                                                  log=False,
                                                  q=0.01)
    tfbs_filter_size = CSH.OrdinalHyperparameter('tfbs_filter_size',
                                                 sequence=[32, 64, 128])
    tfbs_kernel_length = CSH.OrdinalHyperparameter('tfbs_kernel_length',
                                                   sequence=[10, 25])
    dilation_level = CSH.OrdinalHyperparameter('dilation_level',
                                               sequence=[4, 6, 8, 10])
    dilation_kernel_length = CSH.UniformIntegerHyperparameter(
        'dilation_kernel_length', lower=2, upper=6, default_value=2,
        log=False)
    compression_filter_size = CSH.OrdinalHyperparameter(
        'compression_filter_size', sequence=[64, 128, 256])
    # Learning rate searched on a log scale.
    learning_rate = CSH.UniformFloatHyperparameter('learning_rate',
                                                   lower=0.0001,
                                                   upper=0.01,
                                                   default_value=0.001,
                                                   log=True)
    #
    cs.add_hyperparameters([
        dropout_rate, tfbs_filter_size, tfbs_kernel_length, dilation_level,
        dilation_kernel_length, compression_filter_size, learning_rate
    ])
    return cs
def create_config_space(
    hidden_layers: bool = True,
    scaling: bool = True,
    learning: bool = True,
    loss: bool = True,
    easing: bool = True,
    activation_functions: bool = True,
    dropout: bool = True,
    activity_regularizer: bool = True,
    min_dropout_rate_input: float = 0.0,
    min_dropout_rate_hidden_layers: float = 0.0,
    min_dropout_rate_output: float = 0.0,
    max_dropout_rate_input: float = 0.99,
    max_dropout_rate_hidden_layers: float = 0.99,
    max_dropout_rate_output: float = 0.99,
):
    """Build a seeded configuration space for the model.

    Each boolean flag enables one group of hyperparameters; the
    ``min_/max_dropout_rate_*`` arguments bound the three dropout ranges.
    A constant input dimension (1025) and a log-scaled latent dimension
    are always included.

    Returns:
        cs.ConfigurationSpace: the assembled space (seed=1234, so repeated
        calls sample reproducibly).
    """
    config_space = cs.ConfigurationSpace(seed=1234)
    # Always present: fixed input size and searched latent size.
    config_space.add_hyperparameters([
        csh.Constant('input_dimension', value=1025),
        csh.UniformIntegerHyperparameter('latent_dimension',
                                         lower=10,
                                         upper=1024,
                                         log=True),
    ])
    if hidden_layers:
        # Odd layer counts 1, 3, 5, 7, 9.
        config_space.add_hyperparameters([
            csh.OrdinalHyperparameter('number_of_hidden_layers',
                                      list(range(1, 11, 2))),
        ])
    if scaling:
        config_space.add_hyperparameters([
            csh.CategoricalHyperparameter('_scaler', [
                'none',
                'min_max',
                'std',
            ]),
        ])
    if learning:
        config_space.add_hyperparameters([
            csh.UniformIntegerHyperparameter('_batch_size',
                                             lower=16,
                                             upper=512),
            csh.UniformFloatHyperparameter('learning_rate',
                                           lower=1e-6,
                                           upper=1e-1,
                                           log=True),
            csh.UniformFloatHyperparameter('learning_rate_decay',
                                           lower=1e-8,
                                           upper=1e-2,
                                           log=True),
        ])
    if loss:
        config_space.add_hyperparameters([
            csh.CategoricalHyperparameter(
                'loss', ['mse', 'mae', 'binary_crossentropy']),
        ])
    if easing:
        config_space.add_hyperparameters([
            csh.CategoricalHyperparameter(
                'easing', ['ease_linear', 'ease_in_quad', 'ease_out_quad']),
        ])
    if activation_functions:
        config_space.add_hyperparameters([
            csh.CategoricalHyperparameter(
                'hidden_layer_activations',
                ['relu', 'linear', 'sigmoid', 'tanh']),
            csh.CategoricalHyperparameter(
                'output_layer_activation',
                ['relu', 'linear', 'sigmoid', 'tanh']),
        ])
    if dropout:
        config_space.add_hyperparameters([
            csh.UniformFloatHyperparameter('dropout_rate_input',
                                           lower=min_dropout_rate_input,
                                           upper=max_dropout_rate_input),
            csh.UniformFloatHyperparameter(
                'dropout_rate_hidden_layers',
                lower=min_dropout_rate_hidden_layers,
                upper=max_dropout_rate_hidden_layers),
            csh.UniformFloatHyperparameter('dropout_rate_output',
                                           lower=min_dropout_rate_output,
                                           upper=max_dropout_rate_output),
        ])
    if activity_regularizer:
        config_space.add_hyperparameters([
            csh.CategoricalHyperparameter('activity_regularizer',
                                          ['l1', 'l2']),
            csh.UniformFloatHyperparameter('l1_activity_regularizer_factor',
                                           lower=1e-6,
                                           upper=1e-1,
                                           default_value=1e-2,
                                           log=True),
            csh.UniformFloatHyperparameter('l2_activity_regularizer_factor',
                                           lower=1e-6,
                                           upper=1e-1,
                                           default_value=1e-2,
                                           log=True),
        ])
        # Each regularizer factor is only active when the matching
        # regularizer type ('l1' or 'l2') is selected.
        config_space.add_condition(
            cs.EqualsCondition(
                config_space.get_hyperparameter(
                    'l1_activity_regularizer_factor'),
                config_space.get_hyperparameter('activity_regularizer'),
                'l1'))
        config_space.add_condition(
            cs.EqualsCondition(
                config_space.get_hyperparameter(
                    'l2_activity_regularizer_factor'),
                config_space.get_hyperparameter('activity_regularizer'),
                'l2'))
    return config_space
def _get_ordinal(self, param, cd):
    """Return an ordinal hyperparameter named ``param`` whose sequence is
    looked up from the definition mapping ``cd``."""
    sequence = cd[param]
    return CSH.OrdinalHyperparameter(param, sequence)
name='p1', choices=[
    "#pragma clang loop(i1) pack array(A) allocate(malloc)",
    "#pragma clang loop(i1) pack array(B) allocate(malloc)",
    # NOTE(review): a single space presumably means "no pragma emitted" —
    # confirm with the template consumer.
    " "
], default_value=' ')
# Loop-interchange pragma (or disabled via the single-space choice).
p2 = CSH.CategoricalHyperparameter(
    name='p2',
    choices=[
        "#pragma clang loop(i1,j1,k1,i2,j2) interchange permutation(j1,k1,i1,j2,i2)",
        " "
    ],
    default_value=' ')
# Size menus kept as strings; note the per-parameter differences in the
# largest entries (p3 tops out at 128, p4 at 2048, p5 at 256).
p3 = CSH.OrdinalHyperparameter(name='p3',
                               sequence=[
                                   '4', '8', '16', '20', '32', '50', '64',
                                   '80', '96', '100', '128'
                               ],
                               default_value='96')
p4 = CSH.OrdinalHyperparameter(name='p4',
                               sequence=[
                                   '4', '8', '16', '20', '32', '50', '64',
                                   '80', '100', '128', '2048'
                               ],
                               default_value='2048')
p5 = CSH.OrdinalHyperparameter(name='p5',
                               sequence=[
                                   '4', '8', '16', '20', '32', '50', '64',
                                   '80', '100', '128', '256'
                               ],
                               default_value='256')
], default_value="it1,jt1,kt1")
# Candidate orderings (comma-separated tokens) for the second loop nest.
p1 = CSH.CategoricalHyperparameter(name='p1', choices=[
    "it2,jt2,kt2", "jt2,kt2,it2", "kt2,it2,jt2", "kt2,jt2,it2",
    "it2,kt2,jt2", "jt2,it2,kt2"
], default_value="it2,jt2,kt2")
# Shared size menu for p2..p7, kept as strings.
seq = [
    '4', '8', '16', '20', '32', '50', '64', '80', '96', '100', '128', '192',
    '256', '384', '512', '768', '1024', '1536', '2048'
]
p2 = CSH.OrdinalHyperparameter(name='p2', sequence=seq, default_value='96')
p3 = CSH.OrdinalHyperparameter(name='p3', sequence=seq, default_value='96')
p4 = CSH.OrdinalHyperparameter(name='p4', sequence=seq, default_value='96')
p5 = CSH.OrdinalHyperparameter(name='p5', sequence=seq, default_value='96')
p6 = CSH.OrdinalHyperparameter(name='p6', sequence=seq, default_value='96')
p7 = CSH.OrdinalHyperparameter(name='p7', sequence=seq, default_value='96')
# Earlier experiment kept for reference:
#p0= CSH.CategoricalHyperparameter(name='p0', choices=["//#pragma clang loop(jl2) pack array(A) allocate(malloc)", " "], default_value=' ')
#p1= CSH.CategoricalHyperparameter(name='p1', choices=["//#pragma clang loop(il2) pack array(A) allocate(malloc)", " "], default_value=' ')
#cs.add_hyperparameters([p0, p1, p2, p3, p4])
cs.add_hyperparameters([p0, p1, p2, p3, p4, p5, p6, p7])
# problem space
task_space = None
def check_hyperparameter(parameter, name=None, default_value=None):
    """Check if the passed parameter is a valid description of an hyperparameter.

    :meta private:

    Args:
        parameter (str|Hyperparameter): an instance of
            ``ConfigSpace.hyperparameters.hyperparameter`` or a synthetic
            description (e.g., ``list``, ``tuple``, ``dict``).
        name (str): the name of the hyperparameter. Only required when the
            parameter is not a ``ConfigSpace.hyperparameters.hyperparameter``.
        default_value: a default value for the hyperparameter.

    Returns:
        Hyperparameter: the ConfigSpace hyperparameter instance corresponding
        to the ``parameter`` description.

    Raises:
        ValueError: when the description is malformed (wrong container type,
            non-string name, bad tuple length, or unsupported values).
    """
    if isinstance(parameter, csh.Hyperparameter):
        return parameter

    if not isinstance(parameter, (list, tuple, np.ndarray, dict)):
        raise ValueError(
            "Shortcut definition of an hyper-parameter has to be a list, tuple, array or dict."
        )

    if not (type(name) is str):
        raise ValueError(
            "The 'name' of an hyper-parameter should be a string!")

    kwargs = {}
    if default_value is not None:
        kwargs["default_value"] = default_value

    if type(parameter) is tuple:  # Range of reals or integers
        # BUG FIX: `prior` used to be left unbound (NameError) when the
        # tuple had fewer than 2 or more than 3 elements; reject explicitly.
        if len(parameter) == 2:
            prior = "uniform"
        elif len(parameter) == 3:
            prior = parameter[2]
            assert prior in [
                "uniform",
                "log-uniform",
            ], f"Prior has to be 'uniform' or 'log-uniform' when {prior} was given for parameter '{name}'"
            parameter = parameter[:2]
        else:
            raise ValueError(
                f"Invalid dimension {name}: {parameter}. A tuple description"
                f" must have 2 or 3 elements.")
        log = prior == "log-uniform"

        if all([isinstance(p, int) for p in parameter]):
            return csh.UniformIntegerHyperparameter(name=name,
                                                    lower=parameter[0],
                                                    upper=parameter[1],
                                                    log=log,
                                                    **kwargs)
        elif any([isinstance(p, float) for p in parameter]):
            return csh.UniformFloatHyperparameter(name=name,
                                                  lower=parameter[0],
                                                  upper=parameter[1],
                                                  log=log,
                                                  **kwargs)
    elif type(parameter) is list:  # Categorical
        if any([
                isinstance(p, (str, bool)) or isinstance(p, np.bool_)
                for p in parameter
        ]):
            return csh.CategoricalHyperparameter(name,
                                                 choices=parameter,
                                                 **kwargs)
        elif all([isinstance(p, (int, float)) for p in parameter]):
            return csh.OrdinalHyperparameter(name, sequence=parameter,
                                             **kwargs)
    elif type(parameter) is dict:  # Integer or Real distribution
        # Normal distribution: requires both "mu" and "sigma" keys; any
        # extra keys (e.g. lower/upper bounds) are forwarded to ConfigSpace.
        if "mu" in parameter and "sigma" in parameter:
            if type(parameter["mu"]) is float:
                return csh.NormalFloatHyperparameter(name=name,
                                                     **parameter,
                                                     **kwargs)
            elif type(parameter["mu"]) is int:
                return csh.NormalIntegerHyperparameter(name=name,
                                                       **parameter,
                                                       **kwargs)
            else:
                raise ValueError(
                    "Wrong hyperparameter definition! 'mu' should be either a float or an integer."
                )

    raise ValueError(
        f"Invalid dimension {name}: {parameter}. Read the documentation for"
        f" supported types.")
# FIX: `os` is used below (HERE = os.path.dirname(...)) but was not imported
# in this chunk; import it explicitly (harmless if already imported earlier).
import os
import sys
import time
import json
import math

import ConfigSpace as CS
import ConfigSpace.hyperparameters as CSH
from skopt.space import Real, Integer, Categorical

# Make the sibling `plopper` package importable relative to this file.
HERE = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(1, os.path.dirname(HERE) + '/plopper')
from plopper import Plopper

cs = CS.ConfigurationSpace(seed=1234)
# number of threads
p0 = CSH.OrdinalHyperparameter(name='p0',
                               sequence=['4', '5', '6', '7', '8'],
                               default_value='8')
# block size for openmp dynamic schedule
p1 = CSH.OrdinalHyperparameter(name='p1',
                               sequence=['100', '200', '400', '640', '800',
                                         '1000', '1280', '1600', '2000'],
                               default_value='1000')
p2 = CSH.OrdinalHyperparameter(name='p2',
                               sequence=['10', '20', '40', '64', '80',
                                         '100', '128', '160', '200'],
                               default_value='100')
# omp parallel pragma (a single space means the pragma is omitted)
p3 = CSH.CategoricalHyperparameter(name='p3',
                                   choices=["#pragma omp parallel for", " "],
                                   default_value=' ')
# thread affinity type
p4 = CSH.CategoricalHyperparameter(name='p4',
                                   choices=['compact', 'scatter', 'balanced',
                                            'none', 'disabled', 'explicit'],
                                   default_value='none')
# omp placement
p5 = CSH.CategoricalHyperparameter(name='p5',
                                   choices=['cores', 'threads', 'sockets'],
                                   default_value='cores')
cs.add_hyperparameters([p0, p1, p2, p3, p4, p5])

# problem space
task_space = None
import json
import math
import ConfigSpace as CS
import ConfigSpace.hyperparameters as CSH
from skopt.space import Real, Integer, Categorical

# NOTE(review): `os` and `sys` are used below but not imported in this
# chunk — presumably imported earlier in the file; confirm.
HERE = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(1, os.path.dirname(HERE) + '/plopper')
from plopper import Plopper

cs = CS.ConfigurationSpace(seed=1234)
#batch_size
p0 = CSH.OrdinalHyperparameter(name='p0',
                               sequence=[
                                   '16', '32', '64', '100', '128', '200',
                                   '256', '300', '400', '512'
                               ],
                               default_value='128')
#epochs
p1 = CSH.OrdinalHyperparameter(
    name='p1',
    sequence=['1', '2', '4', '8', '12', '16', '20', '22', '24', '30'],
    default_value='20')
#dropout rate
p2 = CSH.OrdinalHyperparameter(name='p2',
                               sequence=['0.1', '0.15', '0.2', '0.25',
                                         '0.4'],
                               default_value='0.2')
#optimizer (choice list continues beyond this chunk)
p3 = CSH.CategoricalHyperparameter(name='p3', choices=[
    'rmsprop', 'adam', 'sgd', 'adamax',
import sys
import ConfigSpace as CS
import ConfigSpace.hyperparameters as CSH
from skopt.space import Real, Integer, Categorical

cs = CS.ConfigurationSpace(seed=1234)

# Each choice is a pragma template; NOTE(review): the '#pN' placeholders are
# presumably substituted with the selected value of parameter pN elsewhere,
# and 'None' drops the clause — confirm with the consuming code generator.
p1 = CSH.CategoricalHyperparameter(name='p1',
                                   choices=['None', '#pragma omp #p3',
                                            '#pragma omp target #p3',
                                            '#pragma omp target #p5',
                                            '#pragma omp #p4'])
p3 = CSH.CategoricalHyperparameter(name='p3',
                                   choices=['None', '#parallel for #p4',
                                            '#parallel for #p6',
                                            '#parallel for #p7'])
p4 = CSH.CategoricalHyperparameter(name='p4', choices=['None', 'simd'])
p5 = CSH.CategoricalHyperparameter(name='p5',
                                   choices=['None',
                                            '#dist_schedule static',
                                            '#dist_schedule #p11'])
p6 = CSH.CategoricalHyperparameter(name='p6',
                                   choices=['None', '#schedule #p10',
                                            '#schedule #p11'])
p7 = CSH.CategoricalHyperparameter(name='p7',
                                   choices=['None', '#numthreads #p12'])
p10 = CSH.CategoricalHyperparameter(name='p10',
                                    choices=['static', 'dynamic'])
p11 = CSH.OrdinalHyperparameter(name='p11', sequence=['1', '8', '16'])
p12 = CSH.OrdinalHyperparameter(name='p12', sequence=['1', '8', '16'])
cs.add_hyperparameters([p1, p3, p4, p5, p6, p7, p10, p11, p12])

#make p3 an active parameter when p1 value is ...
# Child parameters only activate when the parent picks a template that
# references them. NOTE(review): cond0 and cond1 both condition p3 on p1;
# ConfigSpace requires such pairs to be combined (e.g. via OrConjunction)
# before being added to `cs` — presumably done below this chunk; confirm.
cond0 = CS.EqualsCondition(p3, p1, '#pragma omp #p3')
cond1 = CS.EqualsCondition(p3, p1, '#pragma omp target #p3')
cond2 = CS.EqualsCondition(p5, p1, '#pragma omp target #p5')
cond3 = CS.EqualsCondition(p4, p1, '#pragma omp #p4')
cond4 = CS.EqualsCondition(p4, p3, '#parallel for #p4')
cond5 = CS.EqualsCondition(p6, p3, '#parallel for #p6')
cond6 = CS.EqualsCondition(p7, p3, '#parallel for #p7')