def Fn(AI, key=DEFAULT):
    """Return a layer for *key*: a registered LAYERS factory if one exists,
    otherwise a named Keras Activation built from the cleaned key string.

    key: layer/activation name; None falls back to DEFAULT.
    """
    if key is None:
        key = DEFAULT
    # NOTE(review): previously the key could be sampled by the AI —
    # key = AI.pull("fn", list(LAYERS.keys()) + list(FNS.keys()))
    if key in LAYERS:  # membership test on the dict itself; .keys() was redundant
        return LAYERS[key]()
    return L.Activation(clean_activation(key.lower()), name=make_id(key))
def pull(self, *args, log_uniform=False, id=False):
    """Dispatch to the matching optuna ``trial.suggest_*`` call and log it.

    args[0] is the hyperparameter name; the remaining args select the sampler:
    a list -> categorical, two ints -> int range, two floats -> uniform
    (log-uniform when ``log_uniform=True``), four args -> discrete uniform.
    ``id=True`` runs the name through make_id first (``id`` shadows the
    builtin but is kept for interface compatibility).

    Returns whatever self.log_and_return returns for the sampled value.
    Raises Exception when no signature matches.
    """
    args = list(args)
    assert isinstance(args[0], str)
    args[0] = make_id(args[0]) if id else args[0]
    # opt = trial.suggest_categorical('optimizer', ['MomentumSGD', 'Adam'])
    if isinstance(args[1], list):
        return self.log_and_return(
            args, self.trial.suggest_categorical(*args))
    # num_layers = trial.suggest_int('num_layers', 1, 3)
    elif isinstance(args[1], int) and isinstance(args[2], int):
        return self.log_and_return(args, self.trial.suggest_int(*args))
    # learning_rate = trial.suggest_loguniform('learning_rate', 1e-5, 1e-2)
    # dropout_rate = trial.suggest_uniform('dropout_rate', 0.0, 1.0)
    elif isinstance(args[1], float) and isinstance(args[2], float):
        if log_uniform:
            return self.log_and_return(
                args, self.trial.suggest_loguniform(*args))
        return self.log_and_return(args, self.trial.suggest_uniform(*args))
    # rate = trial.suggest_discrete_uniform('rate', 0.0, 1.0, 0.1)
    elif len(args) == 4:
        # bug fix: was `len(args) is 4` — identity on an int is a CPython
        # caching detail, not guaranteed equality (SyntaxWarning on 3.8+)
        return self.log_and_return(
            args, self.trial.suggest_discrete_uniform(*args))
    else:
        log("FAILED TO PULL FOR ARGS", args, color="red")
        raise Exception("AI.Pull failed")
def __init__(self, AI, out_shape):
    """Configure the regresser: the AI picks the ensemble size and the
    first-layer type (looked up on the `nature` module by name)."""
    size = AI.pull('regresser_ensemble_size', SIZE_OPTIONS)
    self.ensemble_size = size
    first = AI.pull("regresser_first", FIRST_OPTIONS)
    self.first = getattr(nature, first)
    layer_name = make_id(f"{size}X_{first}_regresser")
    super(Regresser, self).__init__(name=layer_name)
    self.out_shape = out_shape
    self.ai = AI
def __init__(self, layer_fn=OPTIONS, n=N):
    """Build a residual block.

    layer_fn: callable returning the layer to use, or a list of such
        callables from which one is chosen at random.
    n: repetition count stored on the block — presumably the number of
        inner layers built later; TODO confirm against build/call.
    """
    if isinstance(layer_fn, list):
        layer_fn = random.choice(layer_fn)
    key = make_id(f"{layer_fn.__name__}_block")
    super(ResBlock, self).__init__(name=key)
    self.layer_fn = layer_fn
    self.n = n  # bug fix: was `self.n = N`, silently ignoring the argument
def __init__(self, AI, units=UNITS):
    """Attention-on-attention layer; the AI picks mono- or bi-layer attention
    and the matching call path is bound accordingly."""
    layers = AI.pull("AOA_layers", ["monolayer", "bilayer"])
    super().__init__(name=make_id(f"{layers}_attn"))
    self.call = self.call_monolayer
    if layers == "bilayer":  # bug fix: `if bilayer:` was an undefined name
        self.call = self.call_bilayer
    self.q2 = L.Dense(units)
    self.k2 = L.Dense(units)
    self.v2 = L.Dense(units)
    self.attn = L.Attention()
    self.q1 = L.Dense(units)
    self.k1 = L.Dense(units)
    self.v1 = L.Dense(units)
    self.built = True
def Input(spec, batch_size=1, drop_batch_dim=False):
    """Build a named float32 tf.keras.Input from a spec dict.

    spec: mapping with "shape" (iterable of dims) and "format" (string tag
        used only in the generated layer name).
    batch_size: fixed batch dimension for the Input.
    drop_batch_dim: currently unused — the old shape-trimming logic was
        commented out; kept for interface compatibility.
    """
    shape, fmt = spec["shape"], spec["format"]  # fmt: avoid shadowing builtin `format`
    shape_string = 'x'.join(str(n) for n in shape)  # no list() needed to join
    name = make_id(f"{batch_size}x{shape_string}_{fmt}")
    return tf.keras.Input(shape, batch_size=batch_size,
                          dtype=tf.float32, name=name)
def __init__(self, AI, units=None):
    """Conv-set layer; the AI chooses between the two- and three-conv
    call paths."""
    n = AI.pull("conv_set_n", N)  # plain string: the f-string had no placeholders
    super().__init__(name=make_id(f"conv_set_{n}"))
    # bug fix: `n is 2` relied on CPython small-int caching; use equality
    self.call = self.call_for_two if n == 2 else self.call_for_three
    self.ai = AI
def __init__(self, AI, units=None):
    """Echo layer; the AI chooses the unit count (the `units` parameter is
    unused here — kept for interface compatibility)."""
    self.units = AI.pull("echo_units", UNITS)  # plain string: f-string had no placeholders
    super().__init__(name=make_id(f"echo_{self.units}"))
    self.ai = AI
def __init__(self, AI, units=None):
    """Slim layer; the AI picks N from N_OPTIONS and it is baked into the
    layer name."""
    self.N = AI.pull("slim_n", N_OPTIONS)
    layer_name = make_id(f"slim_{self.N}")
    super().__init__(name=layer_name)
    self.ai = AI
def __init__(self, units=None):
    """Slim layer named after the module-level constant N.

    NOTE(review): unlike the AI-driven variant, this one uses the global N
    in its name rather than a sampled value — presumably intentional.
    """
    super(Slim, self).__init__(name=make_id(f"slim_{N}"))
    self.units = units