Example #1
from yann.core import activations  # assumed import path for the toolbox's activations module

def _activate(x, activation, input_size, verbose=2, **kwargs):
    """
    Produce the activation for the output of any type of layer.

    Args:

        x: input tensor.
        activation: activation spec; refer to the ``add_layer`` method.
        input_size: size of the inputs to the layer.
        verbose: verbosity level; progress messages print at 3 and above.
        dimension: used only for maxout; the dimension along which to
            maxout (passed via ``**kwargs``).

    Returns:

        tuple: ``(out, out_shp)``

    """
    if verbose >= 3:
        print("... Setting up activations")

    # some activations, like maxout, arrive as a tuple carrying extra parameters
    if isinstance(activation, tuple):
        if activation[0] == 'maxout':
            maxout_size = activation[2]
            maxout_type = activation[1]
            out, out_shp = activations.Maxout(x=x,
                                              maxout_size=maxout_size,
                                              input_size=input_size,
                                              type=maxout_type,
                                              dimension=kwargs["dimension"])
        elif activation[0] == 'relu':
            relu_leak = activation[1]
            out = activations.ReLU(x=x, alpha=relu_leak)
            out_shp = input_size
        elif activation[0] == 'softmax':
            temperature = activation[1]
            out = activations.Softmax(x=x, temp=temperature)
            out_shp = input_size
        else:
            raise NotImplementedError("activation " + str(activation[0]) + " is not supported")
    else:
        if activation == 'relu':
            out = activations.ReLU(x=x)
        elif activation == 'abs':
            out = activations.Abs(x=x)
        elif activation == 'sigmoid':
            out = activations.Sigmoid(x=x)
        elif activation == 'tanh':
            out = activations.Tanh(x=x)
        elif activation == 'softmax':
            out = activations.Softmax(x=x)
        elif activation == 'squared':
            out = activations.Squared(x=x)
        elif activation is None:
            out = x
        else:
            # previously an unknown string fell through and left ``out`` unbound
            raise NotImplementedError("activation " + str(activation) + " is not supported")
        out_shp = input_size

    if verbose >= 3:
        print("... Activations are setup")

    return (out, out_shp)
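
A minimal usage sketch of the dispatch above, assuming a Theano symbolic input; the tensor name, the input size of 100, and the 'maxout' type string are illustrative assumptions, not taken from the source:

import theano.tensor as T

x = T.matrix('x')  # symbolic output of some layer, assumed shape (batch, 100)

# plain string spec
out, out_shp = _activate(x, activation='relu', input_size=100)

# tuple spec: softmax with a temperature as the second element
out, out_shp = _activate(x, activation=('softmax', 2.0), input_size=100)

# tuple spec: maxout; note the extra ``dimension`` consumed from **kwargs
out, out_shp = _activate(x, activation=('maxout', 'maxout', 2),
                         input_size=100, dimension=1)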
Example #2
def test_softmax(self):
    # every row of the fixture is identical, so the expected rows are too
    row = [1.74500937e-03, 3.50494502e-02, 2.36161338e-04,
           2.58982354e-01, 7.03987026e-01]
    expected_array = np.array([row] * 5)
    theano_result = A.Softmax(self.theano_input).eval(
        {self.theano_input: self.numpy_input})
    self.assertEqual(theano_result.shape, expected_array.shape)
    self.assertTrue(np.allclose(theano_result, expected_array))
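
The expected values are consistent with a fixture whose rows are [2, 5, 0, 7, 8] (the snippet does not show self.numpy_input, so this input is inferred, not quoted). A plain NumPy cross-check of the standard softmax reproduces the expected row:

import numpy as np

def np_softmax(x):
    # row-wise softmax with the usual max-shift for numerical stability
    z = x - x.max(axis=1, keepdims=True)
    e = np.exp(z)
    return e / e.sum(axis=1, keepdims=True)

inferred_input = np.tile([2.0, 5.0, 0.0, 7.0, 8.0], (5, 1))  # inferred fixture, not from the test
print(np_softmax(inferred_input)[0])
# [1.74500937e-03 3.50494502e-02 2.36161338e-04 2.58982354e-01 7.03987026e-01]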
Example #3
def test_temperature_softmax(self):
    """
    The temperature ``t`` was drawn once via
    ``np.asarray(np.random.uniform(2, 10, 1), dtype=theano.config.floatX)``
    and hardcoded below.
    """
    t = np.array([7.94705237])
    # identical fixture rows again; the high temperature flattens the
    # distribution toward uniform compared with test_softmax above
    row = [0.1381257, 0.20147445, 0.10739337, 0.25912957, 0.29387691]
    expected_array = np.array([row] * 5)
    theano_result = A.Softmax(self.theano_input, temp=t).eval(
        {self.theano_input: self.numpy_input})
    self.assertEqual(theano_result.shape, expected_array.shape)
    self.assertTrue(np.allclose(theano_result, expected_array))
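
Assuming the ``temp`` argument simply divides the logits before the softmax (which is what the expected numbers imply), the same inferred input from the sketch after Example #2 reproduces the flatter, near-uniform rows expected here:

def np_softmax_temperature(x, temp=1.0):
    # scale logits down by the temperature before the softmax; a high
    # temperature (about 7.95 here) pushes the output toward uniform
    return np_softmax(x / temp)

print(np_softmax_temperature(inferred_input, temp=7.94705237)[0])
# [0.1381257  0.20147445 0.10739337 0.25912957 0.29387691]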