Example #1
def test_1d_simples():
    """Test trivial gradient updates."""
    LOGGER.info("Testing Addition.")
    a = tj.var(np.ones(3) * 5)
    b = tj.var(np.ones(3) * 10)

    c = tj.add(a, b)

    for i in range(5):
        g = tj.gradients(c, [a])
        LOGGER.info("%s *** a: %s -- b: %s -- g: %s -- dt: 0.1" % (i, a, b, g))

        a.update(a.v + g[0] * 1e-1)

    assert _true(a.v > 5.0), "A should be larger than 5 but is %s" % a

    LOGGER.info("Testing Subtraction.")
    a = tj.var(np.ones(3) * 5)
    b = tj.var(np.ones(3) * 10)

    c = tj.sub(a, b)

    for i in range(5):
        g = tj.gradients(c, [b])
        LOGGER.info("%s *** a: %s -- b: %s -- g: %s -- dt: 0.1" % (i, a, b, g))

        b.update(b.v + g[0] * 1e-1)

    assert _true(b.v < 10.0), "B should be smaller than 10 but is %s" % b

    LOGGER.info("Testing Multiplication.")
    a = tj.var(np.ones(3) * 5)
    b = tj.var(np.ones(3) * 10)

    c = tj.mul(a, b)

    for i in range(5):
        g = tj.gradients(c, [a])
        LOGGER.info("%s *** a: %s -- b: %s -- g: %s -- dt: 0.1" % (i, a, b, g))

        a.update(a.v + g[0] * 1e-1)

    assert _true(a.v > 5.0), "A should be larger than 5 but is %s" % a

    LOGGER.info("Testing Division.")
    a = tj.var(np.ones(3) * 5)
    b = tj.var(np.ones(3) * 10)

    c = tj.div(a, b)

    for i in range(5):
        g = tj.gradients(c, [b])
        LOGGER.info("%s *** a: %s -- b: %s -- g: %s -- dt: 0.1" % (i, a, b, g))

        b.update(b.v + g[0] * 1e-1)

    assert _true(b.v < 10.0), "B should be smaller than 10 but is %s" % b
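
These snippets assume import numpy as np, import tensorjo as tj and a module-level LOGGER. As a minimal, hypothetical sketch (not part of the original tests), the gradients used above can be cross-checked against a central finite difference, using only the API the examples already show (tj.var, tj.mul, tj.gradients, .update, .v, .output()) and assuming .output() recomputes the forward pass after an update, as the later examples suggest:

import numpy as np
import tensorjo as tj

def finite_difference_check(eps=1e-6):
    """Compare tj.gradients against a central finite difference."""
    a = tj.var(np.ones(3) * 5)
    b = tj.var(np.ones(3) * 10)
    c = tj.mul(a, b)

    analytic = tj.gradients(c, [a])[0]

    # Perturb a in both directions and difference the outputs.
    a.update(a.v + eps)
    hi = c.output()
    a.update(a.v - 2 * eps)
    lo = c.output()
    a.update(a.v + eps)  # restore the original value

    numeric = (hi - lo) / (2 * eps)
    assert np.allclose(analytic, numeric)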
Example #2
def test_linear_regression():
    """Test making simple 1d linear regression and train it."""
    x = np.arange(0, 10)

    # Co-domain
    y = x + 5

    # Variables
    a = tj.var(np.random.rand())
    b = tj.var(np.random.rand())

    err = tj.mse(y, a * x + b)

    LOGGER.info("Optimizing a simple 2 linear regression.")
    LOGGER.info(" X domain is [0, 1 .. 10]")
    LOGGER.info(" Y domain is [5, 6, .. 15]")

    LOGGER.info("before training: coefficient %s -- bias: %s -- mse: %s" %
                (a, b, err.output()))

    for i in range(1200):
        g = tj.gradients(err, [a, b])

        a.update(a.v - np.mean(g[0]) * 1e-2)
        b.update(b.v - np.mean(g[1]) * 1e-2)

    LOGGER.info("after training: coefficient %s -- bias: %s -- mse: %s" %
                (a, b, err.output()))
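
The training data y = x + 5 admits an exact least-squares fit, so the trained coefficient and bias should approach slope 1 and intercept 5. A hypothetical cross-check (not part of the original test) using numpy's closed-form polynomial fit:

import numpy as np

x = np.arange(0, 10)
y = x + 5

# Degree-1 least-squares fit; returns (slope, intercept) = (1.0, 5.0).
coef, bias = np.polyfit(x, y, 1)
print(coef, bias)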
Example #3
def test_logistic_regression():
    """Test making simple 1d linear regression and train it."""
    x = np.random.rand(10) - 0.5
    y = sigmoid(4 * x - 1)

    a = tj.var(np.random.rand())
    b = tj.var(np.random.rand())

    o = tj.sigmoid(a * x + b)
    err = tj.mse(y, o)

    LOGGER.info("Optimizing a simple logistic regression.")
    LOGGER.info(" X domain is (10, 2)")
    LOGGER.info(" Y domain is (10, 1)")

    LOGGER.info("before training: coefficient %s -- bias: %s -- mse: %s" %
                (a, b, err.output()))

    for i in range(5000):
        g = tj.gradients(err, [a, b])

        a.update(a.v - np.mean(g[0]) * 1e-0)
        b.update(b.v - np.mean(g[1]) * 1e-0)

    LOGGER.info("after training: coefficient %s -- bias: %s -- mse: %s" %
                (a, b, err.output()))

    LOGGER.info("perfect would be 4 and -1")
    LOGGER.info("predictions %s", np.round(o.output()))
    LOGGER.info("observations %s", np.round(y))
Example #4
def minimise(self, nodes: ["node.node"]) -> None:
    """Minimise op."""
    cache = list(enumerate(nodes))
    for _ in range(self.rounds):
        grads = tensorjo.gradients(self.master, nodes)
        for i, n in cache:
            n.update(n.v - np.mean(grads[i]) * self.dt)
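
The surrounding class is not shown in this excerpt; the method only reads self.master (the node being minimised), self.rounds and self.dt. A hypothetical host class and usage sketch, with the class name and constructor signature assumed purely for illustration:

import numpy as np
import tensorjo

class Optimiser:
    """Hypothetical host class; only the attributes minimise() reads are set."""

    def __init__(self, master, rounds=100, dt=1e-2):
        self.master = master  # node whose output is minimised
        self.rounds = rounds  # number of gradient steps
        self.dt = dt          # step size

    def minimise(self, nodes) -> None:
        """Minimise op (as above)."""
        cache = list(enumerate(nodes))
        for _ in range(self.rounds):
            grads = tensorjo.gradients(self.master, nodes)
            for i, n in cache:
                n.update(n.v - np.mean(grads[i]) * self.dt)

# Usage on the linear-regression setup from Example #2:
x = np.arange(0, 10)
y = x + 5
a = tensorjo.var(np.random.rand())
b = tensorjo.var(np.random.rand())
err = tensorjo.mse(y, a * x + b)

Optimiser(master=err, rounds=1200, dt=1e-2).minimise([a, b])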
Example #5
def test_convex():
    """Test optimising a simple convex function."""
    LOGGER.info("Testing simple convex: x^2")
    for s in [1, 3]:
        a = tj.var(np.ones(s))
        b = tj.mul(a, a)

        LOGGER.info("Initial a: %s" % a)

        for i in range(100):
            g = tj.gradients(b, [a])

            a.update(a.v - g[0] * 1e-1)

        LOGGER.info("Final a: %s" % a)

        assert _true(
            abs(a.v) < 1.0), "A should be smaller than 1 but is %s" % a

    LOGGER.info("Testing more complex convex: (x * 5 + 3 - x)^2")

    for s in [1, 3]:
        a = tj.var(np.ones(s))
        b = tj.var(5)

        c = tj.mul(a, b)
        c = tj.add(c, 3)
        c = tj.sub(c, a)
        c = tj.mul(c, c)

        LOGGER.info("Initial a: %s" % a)

        for i in range(100):
            g = tj.gradients(c, [a])

            a.update(a.v - g[0] * 1e-2)

        LOGGER.info("Final a: %s" % a.v)

        assert _true(
            abs(a.v) < 1.0), "A should be smaller than 1 but is %s" % a
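
A worked check of the second assertion (not in the original test): the objective (a * 5 + 3 - a)^2 simplifies to (4a + 3)^2, whose gradient 8 * (4a + 3) vanishes at a = -0.75, so gradient descent should settle well inside |a| < 1. The same iteration in plain numpy:

import numpy as np

a = np.ones(3)
for _ in range(100):
    a = a - 8 * (4 * a + 3) * 1e-2  # gradient step, dt = 1e-2

print(a)  # converges to -0.75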