def Tanh(): r"""Returns a layer that computes the hyperbolic tangent function. .. math:: f(x) = \frac{e^x - e^{-x}}{e^x + e^{-x}} """ return Fn('Tanh', lambda x: np.tanh(x))
def forward(self, inputs, weights):
  """Runs one step of the LSTM cell.

  Args:
    inputs: Pair `(x, lstm_state)`; `lstm_state` packs the cell state `c`
      and hidden state `h`, concatenated along the last axis.
    weights: Pair `(w, b)` for the single dense projection that produces
      all four gate pre-activations at once.

  Returns:
    Pair `(new_h, new_lstm_state)` where the new state again concatenates
    the updated cell and hidden states along the last axis.
  """
  x, lstm_state = inputs
  # State tensor layout is [c, h] along the final dimension.
  cell, hidden = jnp.split(lstm_state, 2, axis=-1)
  w, b = weights
  # One dense layer on [x, h] yields every gate pre-activation together.
  gates = jnp.dot(jnp.concatenate([x, hidden], axis=-1), w) + b
  # Gate order: input gate, candidate input, forget gate, output gate.
  in_gate, candidate, forget_gate, out_gate = jnp.split(gates, 4, axis=-1)
  new_cell = (cell * math.sigmoid(forget_gate)
              + math.sigmoid(in_gate) * jnp.tanh(candidate))
  new_hidden = jnp.tanh(new_cell) * math.sigmoid(out_gate)
  return new_hidden, jnp.concatenate([new_cell, new_hidden], axis=-1)
def forward(self, inputs, weights):
  """Runs one step of the GRU cell.

  Args:
    inputs: Pair `(x, gru_state)` of the current input and previous state.
    weights: Tuple `(w1, b1, w2, b2)`; the first dense layer produces the
      update/reset gates, the second produces the candidate state.

  Returns:
    Pair `(new_gru_state, new_gru_state)` — output and carried state are
    the same tensor.
  """
  x, gru_state = inputs
  w1, b1, w2, b2 = weights
  # Joint dense layer on [x, state] yields both gates in one pass.
  gate_values = math.sigmoid(
      jnp.dot(jnp.concatenate([x, gru_state], axis=-1), w1) + b1)
  update, reset = jnp.split(gate_values, 2, axis=-1)
  # Candidate state mixes the input with the reset-scaled previous state.
  candidate = jnp.dot(
      jnp.concatenate([x, reset * gru_state], axis=-1), w2) + b2
  new_gru_state = update * gru_state + (1 - update) * jnp.tanh(candidate)
  return new_gru_state, new_gru_state
def Tanh(x, **unused_kwargs):
  """Computes the hyperbolic tangent of `x`, applied elementwise."""
  del unused_kwargs  # Accepted for interface compatibility; not used.
  return np.tanh(x)
def FastGelu(x, **unused_kwargs):
  """Fast tanh-based approximation of the GELU activation.

  Uses the factored form `x * c * (1 + 0.044715 * x^2)` of the standard
  `c * (x + 0.044715 * x^3)` inner term, with `c = sqrt(2 / pi)`.
  """
  del unused_kwargs  # Accepted for interface compatibility; not used.
  # Keep the original multiplication order so float results match exactly.
  inner = x * 0.7978845608 * (1 + 0.044715 * x * x)
  return 0.5 * x * (1 + np.tanh(inner))
def Tanh():
  """Returns a layer that computes the elementwise hyperbolic tangent."""
  def f(x):  # pylint: disable=invalid-name
    return np.tanh(x)
  return Fn('Tanh', f)
def f(x):  # pylint: disable=invalid-name
  """Fast tanh-based approximation of the GELU activation."""
  # Same multiplication order as the canonical form, so floats match exactly.
  inner = x * 0.7978845608 * (1 + 0.044715 * x * x)
  return 0.5 * x * (1 + np.tanh(inner))
def Tanh(x):
  """Returns the hyperbolic tangent of `x`, applied elementwise."""
  result = np.tanh(x)
  return result
def FastGelu(x):
  """Fast tanh-based approximation of the GELU activation."""
  # Original multiplication order is preserved so float results match exactly.
  inner = x * 0.7978845608 * (1 + 0.044715 * x * x)
  return 0.5 * x * (1 + np.tanh(inner))