Example #1
from keras import backend as K

def selu(x):
  """Scaled Exponential Linear Unit (Klambauer et al., 2017).

  Arguments:
      x: A tensor or variable to compute the activation function for.

  Returns:
      Tensor with the same shape and dtype as `x`.

  References:
      - [Self-Normalizing Neural Networks](https://arxiv.org/abs/1706.02515)
  """
  alpha = 1.6732632423543772848170429916717
  scale = 1.0507009873554804934193349852946
  return scale * K.elu(x, alpha)
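The two constants come straight from the SELU paper, and the function itself is just a scaled ELU. As a sanity check, here is a small NumPy re-implementation of the same formula (the name `selu_np` is ours, used only for this illustration):

import numpy as np

def selu_np(x, alpha=1.6732632423543772848170429916717,
            scale=1.0507009873554804934193349852946):
  # scale * elu(x, alpha): identity for x > 0, alpha * (exp(x) - 1) below
  return scale * np.where(x > 0, x, alpha * (np.exp(x) - 1.0))

print(selu_np(np.array([-2.0, 0.0, 2.0])))  # approx. [-1.5202, 0.0, 2.1014]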
Example #2
from keras import backend as K

def selu(x):
  """Scaled Exponential Linear Unit (Klambauer et al., 2017).

  Arguments:
      x: A tensor or variable to compute the activation function for.

  Returns:
      Tensor with the same shape and dtype as `x`.

  Note:
      - To be used together with the initialization "lecun_normal".
      - To be used together with the dropout variant "AlphaDropout".

  """
  alpha = 1.6732632423543772848170429916717
  scale = 1.0507009873554804934193349852946
  return scale * K.elu(x, alpha)
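The note in the docstring can be made concrete. A minimal usage sketch, assuming the standard Keras Sequential API (the layer sizes and dropout rate are arbitrary choices for illustration):

from keras.models import Sequential
from keras.layers import Dense, AlphaDropout

model = Sequential([
    # lecun_normal keeps activations self-normalizing, as the note suggests
    Dense(64, activation='selu', kernel_initializer='lecun_normal',
          input_shape=(20,)),
    # AlphaDropout is the dropout variant that preserves the mean and
    # variance that SELU relies on
    AlphaDropout(0.1),
    Dense(1, activation='sigmoid'),
])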
Example #4
from keras import backend as K

def elu(x, alpha=1.0):
  return K.elu(x, alpha)

# Fragment of a custom Layer subclass; `self.alpha` is set in its __init__.
def call(self, inputs):
  return K.elu(inputs, self.alpha)
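The stray `call` method above evidently comes from a custom layer class. A minimal sketch of such a layer, assuming the Keras Layer base class (the class name ELULayer and its constructor are illustrative, not taken from the example's source):

from keras import backend as K
from keras.layers import Layer

class ELULayer(Layer):
  # Applies the ELU activation elementwise with a configurable alpha.
  def __init__(self, alpha=1.0, **kwargs):
    super(ELULayer, self).__init__(**kwargs)
    self.alpha = alpha

  def call(self, inputs):
    return K.elu(inputs, self.alpha)

This closely resembles Keras's built-in advanced activation layer keras.layers.ELU, whose call method is the same one-liner.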