Example #1
  def _Activation(self, name, activation_fn_or_name=None):
    """Returns MapLayer params that apply the resolved activation."""
    if activation_fn_or_name is None:
      # No override given: fall back to the builder's configured default.
      activation_fn = self.activation_fn
    elif isinstance(activation_fn_or_name, str):
      # A registry name such as 'RELU': resolve it to a callable.
      activation_fn = activations.GetFn(activation_fn_or_name)
    else:
      # Already a callable: use it as-is.
      activation_fn = activation_fn_or_name
    return builder_layers.MapLayer.Params().Set(name=name, fn=activation_fn)
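
The name-or-callable dispatch above is a common configuration pattern. A self-contained sketch of the same idea, with a local table standing in for lingvo.core.activations (get_fn and _ACTIVATIONS are illustrative names, not Lingvo's API):

import tensorflow as tf

# Minimal stand-in for the activation registry.
_ACTIVATIONS = {'RELU': tf.nn.relu, 'TANH': tf.math.tanh}

def get_fn(fn_or_name, default=tf.nn.relu):
  if fn_or_name is None:
    return default                    # no override: use the default
  if isinstance(fn_or_name, str):
    return _ACTIVATIONS[fn_or_name]   # registry name: look it up
  return fn_or_name                   # already a callable

print(get_fn('TANH')(tf.constant(0.5)).numpy())  # ~0.4621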
Example #2
  def testSquaredReluActivation(self):
    with self.session(use_gpu=True):
      inputs = tf.constant(
          np.linspace(-5.0, 5.0, num=11, dtype='float32'), dtype=tf.float32)
      act_fn = activations.GetFn('SQUARED_RELU')

      # Squared ReLU: f(x) = max(x, 0)**2.
      for inp, out in [(-10.0, 0.0), (0.0, 0.0), (2.0, 4.0)]:
        self.assertEqual(out, act_fn(tf.constant(inp, dtype='float32')).eval())
      # Its derivative is 2x for x > 0 and 0 elsewhere.
      grads_squared_relu = tf.gradients(act_fn(inputs), inputs)
      grads_squared_relu = grads_squared_relu[0].eval()

      self.assertAllClose([0., 0., 0., 0., 0., 0., 2., 4., 6., 8., 10.],
                          grads_squared_relu)
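
The test uses TF1-style graph mode (tf.gradients plus .eval()). The same gradient check in eager TF2, with squared ReLU written inline rather than resolved through activations.GetFn (a sketch, not the test's actual code):

import tensorflow as tf

x = tf.constant([-1.0, 0.0, 3.0])
with tf.GradientTape() as tape:
  tape.watch(x)  # x is a constant, so it must be watched explicitly
  y = tf.square(tf.nn.relu(x))  # squared ReLU: max(x, 0)**2
grad = tape.gradient(y, x)
print(grad.numpy())  # [0. 0. 6.], i.e. 2x for x > 0, else 0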
Example #3
  def _ApplyActivation(self, inputs, act_name):
    if act_name == 'NONE':
      # 'NONE' is a sentinel for the identity function: skip the lookup.
      return inputs
    return activations.GetFn(act_name)(inputs)
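
Note the 'NONE' string sentinel: act_name is typically a string-valued hyperparameter, so reserving 'NONE' keeps the no-activation case expressible in serialized configs, where a Python None would be ambiguous with "unset". Returning inputs directly also skips the registry lookup entirely for the pass-through case.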