Example 1
    def test_class(self):
        class Test(object):
            def f(self, x):
                return x + 100.0

            @classmethod
            def g(cls, x):
                return x + 100.0

            @staticmethod
            def h(x):
                return x + 100.0

        test = Test()
        t = Tracer()
        x = 1.0
        # trace and compile the bound instance method
        o = t.trace(test.f, x)
        f = t.compile_function(x, o)
        assert f(2.0) == 102.0

        # trace and compile the classmethod
        o = t.trace(test.g, x)
        f = t.compile_function(x, o)
        assert f(2.0) == 102.0

        # trace and compile the staticmethod
        o = t.trace(test.h, x)
        f = t.compile_function(x, o)
        assert f(2.0) == 102.0
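Each example on this page is a test method lifted out of a larger test module, so the imports and the enclosing unittest.TestCase are not shown. A minimal scaffold that would let the snippets run might look like the sketch below; the import path for Tracer and tag is an assumption based on the pyautodiff package, and TracerTests is a placeholder name.

    # Minimal scaffold assumed by the snippets on this page. The import location
    # for Tracer and tag is an assumption (they are provided by the pyautodiff
    # "autodiff" package), and TracerTests is a placeholder class name.
    import unittest

    import numpy as np
    import theano

    from autodiff import Tracer, tag  # assumed import path

    class TracerTests(unittest.TestCase):
        # paste the test_* methods from the examples on this page here
        pass

    if __name__ == '__main__':
        unittest.main()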
Example 2
    def test_access_attribute(self):
        class Test(object):
            def __init__(self):
                self.x = np.arange(5.) - 10.0

            def getx(self):
                return self.x

        test = Test()

        def f(x):
            return np.dot(x, test.x)

        x = np.arange(5.)
        t = Tracer()
        o = t.trace(f, x)
        g = t.compile_gradient(x, o, wrt=test.x)
        self.assertTrue(np.allclose(g(x), x))
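The expected value in the final assertion can be checked without the Tracer at all: f(x) = np.dot(x, test.x) is linear in the attribute, so its gradient with respect to test.x is simply x. A plain-NumPy finite-difference spot check of that identity (the names here are illustrative, not part of the API):

    import numpy as np

    x = np.arange(5.)
    a = np.arange(5.) - 10.0        # stands in for test.x
    f = lambda a_: np.dot(x, a_)    # the traced function, viewed as a function of the attribute

    # central finite differences of f with respect to each entry of a
    eps = 1e-6
    fd_grad = np.array([(f(a + eps * np.eye(5)[i]) - f(a - eps * np.eye(5)[i])) / (2 * eps)
                        for i in range(5)])
    assert np.allclose(fd_grad, x)  # matches the gradient the test expects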
Example 3
    def test_readme(self):
        """ the original README example"""

        # -- a vanilla function
        def f1(x):
            return x + 2

        # -- a function referencing a global variable
        y = np.random.random(10)

        def f2(x):
            return x * y

        # -- a function with a local variable
        def f3(x):
            z = tag(np.ones(10), 'local_var')
            return (x + z) ** 2

        # -- create a general symbolic tracer and apply
        #    it to the three functions
        x = np.random.random(10)
        tracer = Tracer()

        out1 = tracer.trace(f1, x)
        out2 = tracer.trace(f2, out1)
        out3 = tracer.trace(f3, out2)

        # -- compile a function representing f(x, y, z) = out3
        new_fn = tracer.compile_function(inputs=[x, y, 'local_var'],
                                         outputs=out3)

        # -- compile the gradient of f(x) = out3, with respect to y
        fn_grad = tracer.compile_gradient(inputs=x,
                                          outputs=out3,
                                          wrt=y,
                                          reduction=theano.tensor.sum)

        assert fn_grad  # to stop flake error

        self.assertTrue(np.allclose(new_fn(x, y, np.ones(10)), f3(f2(f1(x)))))
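For reference, out3 is just the composition of the three traced functions, so the final assertion compares the compiled function against the equivalent plain-NumPy expression. That expression can be written directly (this only restates the example; it is not part of the Tracer API):

    import numpy as np

    x = np.random.random(10)
    y = np.random.random(10)
    z = np.ones(10)                           # the array tagged as 'local_var' inside f3
    out3_reference = ((x + 2) * y + z) ** 2   # f3(f2(f1(x)))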
Example 4
    def test_multiple_trace(self):
        def f1(x):
            return x + 1.0

        def f2(x):
            return x * 2.0

        def f3(x):
            return x ** 2
        t = Tracer()
        x = np.random.random((3, 4))
        o1 = t.trace(f1, x)
        o2 = t.trace(f2, o1)
        o3 = t.trace(f3, o2)

        # test function
        f = t.compile_function(x, o3)
        self.assertTrue(np.allclose(f(x), f3(f2(f1(x)))))

        # test gradient
        o4 = t.trace(lambda x: x.sum(), o3)
        g = t.compile_gradient(x, o4, wrt=x)
        self.assertTrue(np.allclose(g(x), 8 * (x+1)))
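The expected gradient in the last assertion follows from differentiating the composed function by hand: f3(f2(f1(x))) = (2 * (x + 1)) ** 2 = 4 * (x + 1) ** 2, and the derivative of its sum with respect to x is 8 * (x + 1). A central-difference spot check of one entry, independent of the Tracer:

    import numpy as np

    x = np.random.random((3, 4))
    composed = lambda x_: (2.0 * (x_ + 1.0)) ** 2   # f3(f2(f1(x)))
    analytic = 8 * (x + 1)                          # d/dx sum(composed(x))

    eps = 1e-6
    d = np.zeros_like(x)
    d[0, 0] = eps
    fd = (composed(x + d).sum() - composed(x - d).sum()) / (2 * eps)
    assert np.isclose(fd, analytic[0, 0])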