def __init__(
    self,
    featurewise_center: Boolean(),
    samplewise_center: Boolean(),
    featurewise_std_normalization: Boolean(),
    samplewise_std_normalization: Boolean(),
    rotation_range: Discrete(0, 15),
    width_shift_range: Continuous(0, 0.25),
    height_shift_range: Continuous(0, 0.25),
    shear_range: Continuous(0, 15),
    zoom_range: Continuous(0, 0.25),
    horizontal_flip: Boolean(),
    vertical_flip: Boolean(),
):
    super().__init__(
        featurewise_center=featurewise_center,
        samplewise_center=samplewise_center,
        featurewise_std_normalization=featurewise_std_normalization,
        samplewise_std_normalization=samplewise_std_normalization,
        rotation_range=rotation_range,
        width_shift_range=width_shift_range,
        height_shift_range=height_shift_range,
        shear_range=shear_range,
        zoom_range=zoom_range,
        horizontal_flip=horizontal_flip,
        vertical_flip=vertical_flip,
    )

def __init__(
    self,
    filters: Discrete(2, 8),
    kernel_size: Categorical(3, 5, 7),
    l1: Continuous(0, 1e-3),
    l2: Continuous(0, 1e-3),
):
    self.l1 = l1
    self.l2 = l2
    super().__init__(
        filters=2**filters,
        kernel_size=(kernel_size, kernel_size),
        kernel_regularizer=regularizers.l1_l2(l1=l1, l2=l2),
        padding="same",
        data_format="channels_last",
    )

def __init__(
    self,
    dm: Discrete(min=0, max=2),
    dbow_words: Discrete(min=-100, max=100),
    dm_concat: Discrete(min=-100, max=100),
    dm_tag_count: Discrete(min=0, max=2),
    alpha: Continuous(min=0.001, max=0.075),
    epochs: Discrete(min=2, max=10),
    window: Discrete(min=2, max=10),
    inner_tokenizer: algorithm(Sentence(), List(Word())),
    inner_stemmer: algorithm(Word(), Stem()),
    inner_stopwords: algorithm(List(Word()), List(Word())),
    lowercase: Boolean(),
    stopwords_remove: Boolean(),
):
    self.inner_tokenizer = inner_tokenizer
    self.inner_stemmer = inner_stemmer
    self.inner_stopwords = inner_stopwords
    self.lowercase = lowercase
    self.stopwords_remove = stopwords_remove
    super().__init__(
        dm=dm,
        dbow_words=dbow_words,
        dm_concat=dm_concat,
        dm_tag_count=dm_tag_count,
        alpha=alpha,
        epochs=epochs,
        window=window,
    )

def __init__(
    self,
    units: Discrete(32, 1024),
    activation: Categorical("tanh", "sigmoid", "relu", "linear"),
    recurrent_activation: Categorical("tanh", "sigmoid", "relu", "linear"),
    dropout: Continuous(0, 0.5),
    recurrent_dropout: Continuous(0, 0.5),
):
    super().__init__(
        units=units,
        activation=activation,
        recurrent_activation=recurrent_activation,
        dropout=dropout,
        recurrent_dropout=recurrent_dropout,
        return_sequences=False,
    )

def _get_arg_values(arg, value, cls):
    if isinstance(value, bool):
        return Boolean()
    if isinstance(value, int):
        return Discrete(*_get_integer_values(arg, value, cls))
    if isinstance(value, float):
        return Continuous(*_get_float_values(arg, value, cls))
    if isinstance(value, str):
        values = _find_parameter_values(arg, cls)
        return Categorical(*values) if values else None
    return None

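The dispatcher above maps a parameter's default value to an annotation purely by its Python type, delegating range discovery to helpers that are shown or referenced elsewhere. As a rough illustration of how such a dispatcher could be driven from a class signature, the sketch below inspects `__init__` defaults and feeds each one through a simplified version; the `ToyBoolean`/`ToyDiscrete`/`ToyContinuous` classes, the fixed heuristics, and the `build_space` helper are hypothetical stand-ins, not the framework's actual annotation or probing machinery.

import inspect

# Hypothetical stand-ins that only record their arguments; they are not the
# real annotation classes used in the listings above.
class ToyBoolean:
    def __repr__(self):
        return "Boolean()"

class ToyDiscrete:
    def __init__(self, lo, hi):
        self.lo, self.hi = lo, hi
    def __repr__(self):
        return f"Discrete({self.lo}, {self.hi})"

class ToyContinuous(ToyDiscrete):
    def __repr__(self):
        return f"Continuous({self.lo}, {self.hi})"

def toy_arg_values(value):
    # Same type-based dispatch as _get_arg_values, but with fixed heuristics
    # instead of the probing helpers (_get_integer_values, _get_float_values).
    if isinstance(value, bool):      # bool must be checked before int
        return ToyBoolean()
    if isinstance(value, int):
        return ToyDiscrete(0, 10 * max(value, 1))
    if isinstance(value, float):
        return ToyContinuous(0.0, 10 * value if value > 0 else 1.0)
    return None

def build_space(cls):
    """Map every keyword parameter with a default to a toy annotation."""
    space = {}
    for name, param in inspect.signature(cls.__init__).parameters.items():
        if name == "self" or param.default is inspect.Parameter.empty:
            continue
        annotation = toy_arg_values(param.default)
        if annotation is not None:
            space[name] = annotation
    return space

class Example:
    def __init__(self, alpha=0.01, iterations=100, shuffle=True):
        pass

print(build_space(Example))
# e.g. {'alpha': Continuous(0.0, 0.1), 'iterations': Discrete(0, 1000), 'shuffle': Boolean()}
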
def __init__(
    self,
    merge_mode: Categorical("sum", "mul", "concat", "ave"),
    units: Discrete(32, 1024),
    activation: Categorical("tanh", "sigmoid", "relu", "linear"),
    recurrent_activation: Categorical("tanh", "sigmoid", "relu", "linear"),
    dropout: Continuous(0, 0.5),
    recurrent_dropout: Continuous(0, 0.5),
):
    super().__init__(
        layer=_LSTM(
            units=units,
            activation=activation,
            recurrent_activation=recurrent_activation,
            dropout=dropout,
            recurrent_dropout=recurrent_dropout,
            return_sequences=False,
        ),
        merge_mode=merge_mode,
    )

def _get_float_values(arg, value, cls):
    if value in [inf, nan]:
        return None
    if value > 0:
        min_value = -10 * value
        max_value = 10 * value
    elif value == 0:
        min_value = -1
        max_value = 1
    else:
        return None

    # binary search for minimum value
    left = min_value
    right = value
    while abs(left - right) > 1e-2:
        current_value = round((left + right) / 2, 3)
        if _try(cls, arg, current_value):
            right = current_value
        else:
            left = current_value
    min_value = right

    # binary search for maximum value
    left = value
    right = max_value
    while abs(left - right) > 1e-2:
        current_value = round((left + right) / 2, 3)
        if _try(cls, arg, current_value):
            left = current_value
        else:
            right = current_value
    max_value = left

    if max_value - min_value >= 2 * value:
        return Continuous(min=min_value, max=max_value)
    return None

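The range-probing logic above depends on a `_try` helper that is not shown here; a reasonable reading is that it instantiates the class with the candidate value and reports whether that succeeds. The sketch below reproduces the same two binary searches against a hypothetical `probe` function and a toy estimator, to show how the accepted interval narrows toward the class's real constraints; `probe`, `ToyEstimator`, and `float_range` are illustrative stand-ins, not the actual `_try` or `_get_float_values`.

# Hypothetical stand-in for _try: accept a candidate value iff constructing
# the class with it does not raise.
def probe(cls, arg, value):
    try:
        cls(**{arg: value})
        return True
    except Exception:
        return False

# Toy estimator whose only constraint is 0 < tol <= 1.
class ToyEstimator:
    def __init__(self, tol=0.1):
        if not 0 < tol <= 1:
            raise ValueError("tol must be in (0, 1]")
        self.tol = tol

def float_range(cls, arg, default):
    """Same two binary searches as _get_float_values, driven by probe()."""
    min_value, max_value = -10 * default, 10 * default

    # binary search for the smallest value the class still accepts
    left, right = min_value, default
    while abs(left - right) > 1e-2:
        mid = round((left + right) / 2, 3)
        left, right = (left, mid) if probe(cls, arg, mid) else (mid, right)
    min_value = right

    # binary search for the largest value the class still accepts
    left, right = default, max_value
    while abs(left - right) > 1e-2:
        mid = round((left + right) / 2, 3)
        left, right = (mid, right) if probe(cls, arg, mid) else (left, mid)
    max_value = left

    return min_value, max_value

print(float_range(ToyEstimator, "tol", 0.1))  # roughly (0.006, 0.993), i.e. close to (0, 1]
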
def __init__(self, rate: Continuous(0, 0.5)):
    super().__init__(rate=rate)

def __init__(self, penalty: Categorical("l1", "l2"), C: Continuous(0.1, 10)):
    super().__init__(penalty=penalty, C=C, solver="liblinear")

def __init__(self, var_smoothing: Continuous(1e-10, 0.1)):
    super().__init__(var_smoothing=var_smoothing)

def __init__(
    self, kernel: Categorical("rbf", "linear", "poly"), C: Continuous(0.1, 10)
):
    super().__init__(C=C, kernel=kernel)

def __init__(self, x: Continuous(-1, 1), y: Continuous(-1, 1), z: Continuous(-1, 1)):
    self.x = x
    self.y = y
    self.z = z

def __init__(
    self, kernel: Categorical("rbf", "linear", "poly"), reg: Continuous(0.1, 10)
):
    super().__init__(C=reg, kernel=kernel)
    self.kernel = kernel
    self.reg = reg

def __init__(self, penalty: Categorical("l1", "l2"), reg: Continuous(0.1, 10)):
    super().__init__(penalty=penalty, C=reg, solver="liblinear")
    self.penalty = penalty
    self.reg = reg
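
The wrappers above encode their search space directly in the `__init__` annotations, so a sampler only needs to read those annotations and draw one value from each before instantiating the wrapper. The sketch below shows that idea with minimal stand-in annotation classes and a `sample_config` helper; every name in it is hypothetical and only mirrors the pattern of the listings, not the framework's actual sampling machinery.

import inspect
import random

# Minimal stand-ins for the annotation classes used in the listings above.
class Categorical:
    def __init__(self, *options):
        self.options = options
    def sample(self):
        return random.choice(self.options)

class Continuous:
    def __init__(self, min, max):
        self.min, self.max = min, max
    def sample(self):
        return random.uniform(self.min, self.max)

class LogisticRegressionWrapper:
    # Same annotated-signature pattern as the last listing above; it only
    # stores its hyperparameters instead of delegating to a real estimator.
    def __init__(self, penalty: Categorical("l1", "l2"), reg: Continuous(0.1, 10)):
        self.penalty = penalty
        self.reg = reg

def sample_config(cls):
    """Draw one concrete value per annotated parameter of cls.__init__."""
    params = inspect.signature(cls.__init__).parameters
    return {
        name: p.annotation.sample()
        for name, p in params.items()
        if name != "self" and p.annotation is not inspect.Parameter.empty
    }

config = sample_config(LogisticRegressionWrapper)
model = LogisticRegressionWrapper(**config)
print(config)  # e.g. {'penalty': 'l2', 'reg': 3.7}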