def build(self, shape):
    units = self.units if self.units else shape[-1]
    self.channel_changer = tf.identity
    if units != self.d_model:
        self.channel_changer = nature.Layer(
            self.ai, units=units, layer_fn=self.layer_fn)
    super().build(shape)
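# Hypothetical usage note, not from the source: with self.units left unset,
# build() falls back to the incoming channel count, so channel_changer stays
# tf.identity whenever that count already equals d_model; otherwise the
# nature.Layer projection maps the d_model-wide output back to `units`
# channels, e.g. as a last step of call():
#     outputs = self.channel_changer(outputs)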
def build(self, shape):
    # build a throwaway Dense only to discover the kernel/bias shapes for `shape`
    dense = L.Dense(units=self.units)
    dense.build(shape)
    k_shape = dense.kernel.shape
    b_shape = dense.bias.shape
    k_size = get_size(k_shape)
    b_size = get_size(b_shape)
    # the hyper layer predicts all kernel + bias parameters as one flat vector
    self.hyper_layer = N.Layer(self.ai, units=k_size + b_size)
    self.reshape_kernel = L.Reshape(k_shape)
    self.reshape_bias = L.Reshape(b_shape)
    self.split = L.Lambda(lambda x: tf.split(x, [k_size, b_size]))
    super().build(shape)
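# Hypothetical sketch, not from the source: one way the pieces built above
# could be wired in call(). It assumes the hyper_layer output has shape
# (batch, k_size + b_size), that the split runs along the feature axis,
# and that x is a 2-D (batch, in_dim) tensor.
def call(self, x):
    params = self.hyper_layer(x)                # predict flat kernel + bias
    kernel_flat, bias_flat = self.split(params)
    kernel = self.reshape_kernel(kernel_flat)   # (batch, in_dim, units)
    bias = self.reshape_bias(bias_flat)         # (batch, units)
    return tf.einsum('bi,bio->bo', x, kernel) + bias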
def __init__(self, AI, units=UNITS, layer_fn=LAYER):
    super(Attention, self).__init__()
    self.memory_size = AI.pull("attn_memory_size", MEMORY_SIZE_OPTIONS)
    self.d_model = AI.pull("attn_d_model", D_MODEL_OPTIONS)
    self.n_heads = AI.pull("attn_n_heads", N_HEADS_OPTIONS)
    self.p_drop = AI.pull("attn_p_drop", DROP_OPTIONS)
    assert self.d_model % self.n_heads == 0
    self.depth = self.d_model // self.n_heads
    self.delta = nature.Delta(AI)
    self.memory = self.add_weight(
        'memory', (1, self.memory_size, self.d_model),
        initializer=INIT(), regularizer=REG(), trainable=False)
    self.dense = nature.Layer(AI, units=self.d_model, layer_fn=layer_fn)
    self.wq = nature.Layer(AI, units=self.d_model, layer_fn=layer_fn)
    self.wk = nature.Layer(AI, units=self.d_model, layer_fn=layer_fn)
    self.wv = nature.Layer(AI, units=self.d_model, layer_fn=layer_fn)
    self.layer_fn = layer_fn
    self.units = units
    self.ai = AI
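# Hypothetical sketch, not from the source: the standard multi-head scaled
# dot-product attention these weights suggest. split_heads and the handling
# of x are assumptions; only wq, wk, wv, dense, n_heads, depth, and d_model
# come from the constructor above, and channel_changer from the build() shown
# earlier (self.memory and self.delta are ignored here).
def split_heads(self, x, batch_size):
    x = tf.reshape(x, (batch_size, -1, self.n_heads, self.depth))
    return tf.transpose(x, perm=[0, 2, 1, 3])        # (batch, heads, seq, depth)

def call(self, x):
    batch_size = tf.shape(x)[0]
    q = self.split_heads(self.wq(x), batch_size)
    k = self.split_heads(self.wk(x), batch_size)
    v = self.split_heads(self.wv(x), batch_size)
    logits = tf.matmul(q, k, transpose_b=True)
    logits = logits / tf.math.sqrt(tf.cast(self.depth, logits.dtype))
    weights = tf.nn.softmax(logits, axis=-1)
    attended = tf.matmul(weights, v)                 # (batch, heads, seq, depth)
    attended = tf.transpose(attended, perm=[0, 2, 1, 3])
    attended = tf.reshape(attended, (batch_size, -1, self.d_model))
    return self.channel_changer(self.dense(attended))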
def __init__(self, AI, layer_fn=LAYER_FN):
    super().__init__()
    n_layers = AI.pull("dense_n_layers", LAYERS)
    units = AI.pull("dense_units", UNITS)
    self.d_increase = units * n_layers
    self.concat = L.Concatenate(-1)
    self.layers = []
    for n in range(n_layers):
        if isinstance(layer_fn, list):
            layer_fn = random.choice(layer_fn)
        np = nature.NormPreact(AI)
        super().__setattr__(f"np_{n}", np)
        layer = nature.Layer(AI, units=units, layer_fn=layer_fn)
        super().__setattr__(f"layer_{n}", layer)
        self.layers.append(pipe(np, layer))
    self.built = True
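# Hypothetical sketch, not from the source: the DenseNet-style forward pass
# that concat and d_increase suggest, where each (NormPreact -> Layer) pipe
# sees the concatenation of the block input and every previous output, so the
# feature axis grows by `units` per layer (d_increase in total).
def call(self, x):
    for layer in self.layers:
        y = layer(x)
        x = self.concat([x, y])
    return x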
def __init__(self, AI, units=None, layer_list=None, layer_fn=LAYER):
    if not layer_list:
        LAYERS = AI.pull('mlp_layers', MIN_LAYERS, MAX_LAYERS)
        UNITS = AI.pull('mlp_units', UNITS_OPTIONS)
        layer_list = [(UNITS, None) for _ in range(LAYERS)]
    # use len(layer_list) so the name is defined even when a layer_list is passed in
    super().__init__(f"{len(layer_list)}_layer_mlp")
    if units:
        layer_list[-1] = (units, layer_list[-1][1])
    self.layers = []
    for i, (units, fn) in enumerate(layer_list):
        fc = nature.Layer(AI, units=units, layer_fn=layer_fn)
        super().__setattr__(f'fc_{i}', fc)
        self.layers.append(fc)
        if fn:
            fn = nature.Fn(AI, key=fn)
            super().__setattr__(f'fn_{i}', fn)
            self.layers.append(fn)
    self.built = True
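# Hypothetical sketch, not from the source: the sequential forward pass the
# layer list above implies, alternating fc layers with optional Fn activations.
def call(self, x):
    for layer in self.layers:
        x = layer(x)
    return x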
def __init__(self, units=UNITS, layer_fn=LAYER):
    super(Sandwich, self).__init__()
    self.np1 = nature.NormPreact()
    self.layer = nature.Layer(units, layer_fn=layer_fn)
    self.np2 = nature.NormPreact()
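# Hypothetical sketch, not from the source: the norm/preactivation "sandwich"
# the constructor implies, wrapping the inner layer between the two NormPreact
# blocks.
def call(self, x):
    x = self.np1(x)
    x = self.layer(x)
    return self.np2(x)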