Example 1
    def test_matmul_relu(self):
        """Tests that matrix multiply can be combined with a simple relu."""
        lhs = tile.Value.from_ndims(2)
        rhs = tile.Value.from_dimensions((3, None))
        out = op.relu(op.matmul(lhs, rhs))
        func = tile.compose(self._ctx,
                            self._dev,
                            inputs=[('lhs', lhs), ('rhs', rhs)],
                            outputs=[('out', out)])

        invoker = plaidml.Invoker(self._ctx, func)
        invoker.set_input('lhs', self.make_inited_tensor((3, 3)))
        invoker.set_input('rhs', self.make_inited_tensor((3, 3)))
        output = self.make_output_tensor(invoker.get_output_shape('out'))
        invoker.set_output('out', output)
        invoker.invoke()

        with output.mmap_current() as view:
            self.assertEqual(view[0], 1.0 + 8.0 + 21.0)
            self.assertEqual(view[1], 2.0 + 10.0 + 24.0)
            self.assertEqual(view[2], 3.0 + 12.0 + 27.0)
            self.assertEqual(view[(1, 0)], 4.0 + 20.0 + 42.0)
            self.assertEqual(view[(1, 1)], 8.0 + 25.0 + 48.0)
            self.assertEqual(view[(1, 2)], 12.0 + 30.0 + 54.0)
            self.assertEqual(view[6], 7.0 + 32.0 + 63.0)
            self.assertEqual(view[7], 14.0 + 40.0 + 72.0)
            self.assertEqual(view[8], 21.0 + 48.0 + 81.0)
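For reference, the expected values above are just the entries of the 3x3 matrix product (every entry is positive, so the relu leaves this data unchanged). A minimal NumPy sketch, assuming make_inited_tensor fills a (3, 3) buffer with the values 1.0 through 9.0 in row-major order (an inference from the sums being checked, not something stated in the test):

import numpy as np

# Hypothetical stand-in for make_inited_tensor((3, 3)): values 1..9, row-major.
a = np.arange(1.0, 10.0).reshape(3, 3)   # [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
out = np.maximum(a @ a, 0.0)             # matmul followed by ReLU
assert out[0, 0] == 1.0 + 8.0 + 21.0     # 30.0, matches view[0]
assert out[1, 1] == 8.0 + 25.0 + 48.0    # 81.0, matches view[(1, 1)]
assert out[2, 2] == 21.0 + 48.0 + 81.0   # 150.0, matches view[8]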
Example 2
def relu(data):
    # Element-wise ReLU, returned as a one-element tuple of outputs.
    return (op.relu(data), )
Example 3
def prelu(x, slope):
    # If the slope is per-channel (1-D) and the input has spatial dimensions,
    # append trailing singleton axes so the slope broadcasts across them.
    if slope.shape.ndims == 1 and x.shape.ndims > 2:
        slope = op.reshape(slope, [slope.shape.dims[0]] + [1] *
                           (x.shape.ndims - 2))
    return (op.relu(x, alpha=slope), )
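As a rough reference for what this handler computes, here is a NumPy sketch of PReLU with the same slope reshape, assuming an (N, C, H, W) input and a per-channel slope of shape (C,); the names and shapes are illustrative, not taken from the PlaidML sources:

import numpy as np

def prelu_reference(x, slope):
    # Append trailing singleton axes so a (C,) slope broadcasts over the
    # spatial dimensions, mirroring the op.reshape call in the example above.
    if slope.ndim == 1 and x.ndim > 2:
        slope = slope.reshape((slope.shape[0],) + (1,) * (x.ndim - 2))
    # PReLU: pass positive values through, scale negative values by the slope.
    return np.where(x > 0, x, slope * x)

x = np.random.randn(2, 3, 4, 4)
slope = np.array([0.1, 0.2, 0.3])
y = prelu_reference(x, slope)   # negative entries scaled per channel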
Example 4
def leaky_relu(x, alpha=0.01):
    # Leaky ReLU: op.relu with a scalar alpha scales negative inputs by alpha.
    return (op.relu(x, alpha), )
Example 5
def leaky_relu(unused_ctx, x, alpha=0.01):
    # Same as the previous example, written as a handler that also receives a
    # (here unused) context argument.
    return (op.relu(x, alpha), )
Example 6
def prelu(unused_ctx, x, slope):
    # N.B. According to the operator specification, this is the V6 behavior of PRelu.
    # The ONNX backend tests, however, expect that at V6, the V1 operator behavior will be used
    # (broadcasting a one-dimensional slope to the channels dimension of the input tensor);
    # at V7, they expect the unidirectional broadcast behavior (implemented by this method).
    return (op.relu(x, alpha=slope), )
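The shape arithmetic behind that comment can be checked with NumPy's broadcasting rules (illustrative only; the opset behaviors are those described in the comment): under trailing-aligned broadcasting a bare (C,) slope does not line up with the channel axis of an (N, C, H, W) input, which is why the V1-style behavior had to broadcast it to the channel dimension explicitly.

import numpy as np

# A (C, 1, 1) slope broadcasts onto the channel axis of an (N, C, H, W) input...
print(np.broadcast_shapes((3, 1, 1), (2, 3, 4, 4)))   # (2, 3, 4, 4)

# ...but a bare (C,) slope aligns with the trailing W axis and fails.
try:
    np.broadcast_shapes((3,), (2, 3, 4, 4))
except ValueError as err:
    print('incompatible shapes:', err)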
Example 7
def relu(unused_ctx, data):
    # Plain element-wise ReLU handler; the context argument is unused.
    return (op.relu(data), )