Example #1
0
def test_linear_regression():
    """Test making simple 1d linear regression and train it."""
    # Domain: integers 0..9.
    x = np.arange(0, 10)

    # Co-domain: y = x + 5, so the perfect fit is coefficient 1, bias 5.
    y = x + 5

    # Randomly initialised trainable scalars.
    a = tj.var(np.random.rand())
    b = tj.var(np.random.rand())

    err = tj.mse(y, a * x + b)

    LOGGER.info("Optimizing a simple 2 linear regression.")
    LOGGER.info(" X domain is [0, 1 .. 10]")
    LOGGER.info(" Y domain is [5, 6, .. 15]")

    LOGGER.info("before training: coefficient %s -- bias: %s -- mse: %s" %
                (a, b, err.output()))

    # Hand-rolled gradient descent: 1200 steps with learning rate 1e-2.
    learning_rate = 1e-2
    for _ in range(1200):
        grad_a, grad_b = tj.gradients(err, [a, b])

        a.update(a.v - np.mean(grad_a) * learning_rate)
        b.update(b.v - np.mean(grad_b) * learning_rate)

    LOGGER.info("after training: coefficient %s -- bias: %s -- mse: %s" %
                (a, b, err.output()))
Example #2
0
def test_gd_on_linear_regression():
    """Test making simple 1d linear regression and train it."""
    # Domain: integers 0..9.
    x = np.arange(0, 10)

    # Co-domain: y = x + 5, so the perfect fit is coefficient 1, bias 5.
    y = x + 5

    # Randomly initialised trainable scalars.
    a = tj.var(np.random.rand())
    b = tj.var(np.random.rand())

    err = tj.mse(y, a * x + b)

    LOGGER.info("Optimizing a simple 2 linear regression.")
    LOGGER.info(" X domain is [0, 1 .. 10]")
    LOGGER.info(" Y domain is [5, 6, .. 15]")

    LOGGER.info("before training: coefficient %s -- bias: %s -- mse: %s" %
                (a, b, err.output()))

    # Same optimisation as the manual test, but driven by the built-in
    # gradient-descent optimiser for 1200 rounds.
    optimiser = tj.opt.gd(err)
    optimiser.rounds = 1200
    optimiser.minimise([a, b])

    LOGGER.info("after training: coefficient %s -- bias: %s -- mse: %s" %
                (a, b, err.output()))
Example #3
0
def test_logistic_regression():
    """Test making a simple 1d logistic regression and train it.

    The target is y = sigmoid(4 * x - 1); a perfect fit is
    coefficient 4, bias -1 (see the log message at the end).
    """
    # Ten random inputs centred around 0.
    x = np.random.rand(10) - 0.5
    y = sigmoid(4 * x - 1)

    # Randomly initialised trainable scalars.
    a = tj.var(np.random.rand())
    b = tj.var(np.random.rand())

    o = tj.sigmoid(a * x + b)
    err = tj.mse(y, o)

    LOGGER.info("Optimizing a simple logistic regression.")
    LOGGER.info(" X domain is (10, 2)")
    LOGGER.info(" Y domain is (10, 1)")

    LOGGER.info("before training: coefficient %s -- bias: %s -- mse: %s" %
                (a, b, err.output()))

    # Hand-rolled gradient descent: 5000 steps with learning rate 1.
    for _ in range(5000):
        g = tj.gradients(err, [a, b])

        a.update(a.v - np.mean(g[0]) * 1e-0)
        b.update(b.v - np.mean(g[1]) * 1e-0)

    LOGGER.info("after training: coefficient %s -- bias: %s -- mse: %s" %
                (a, b, err.output()))

    LOGGER.info("perfect would be 4 and -1")
    LOGGER.info("predictions %s", np.round(o.output()))
    LOGGER.info("observations %s", np.round(y))
Example #4
0
def test_gd_on_logistic_regression():
    """Test making a simple 1d logistic regression and train it.

    Same setup as test_logistic_regression, but the update loop is
    replaced by the built-in gradient-descent optimiser.
    """
    # Ten random inputs centred around 0; target is
    # y = sigmoid(4 * x - 1), so a perfect fit is a=4, b=-1.
    x = np.random.rand(10) - 0.5
    y = sigmoid(4 * x - 1)

    # Randomly initialised trainable scalars.
    a = tj.var(np.random.rand())
    b = tj.var(np.random.rand())

    o = tj.sigmoid(a * x + b)
    err = tj.mse(y, o)

    LOGGER.info("Optimizing a simple logistic regression.")
    LOGGER.info(" X domain is (10, 2)")
    LOGGER.info(" Y domain is (10, 1)")

    LOGGER.info("before training: coefficient %s -- bias: %s -- mse: %s" %
                (a, b, err.output()))

    # Gradient-descent optimiser: learning rate 1.0, 5000 rounds.
    opt = tj.opt.gd(err)
    opt.dt = 1e-0
    opt.rounds = 5000

    opt.minimise([a, b])

    LOGGER.info("after training: coefficient %s -- bias: %s -- mse: %s" %
                (a, b, err.output()))

    LOGGER.info("perfect would be 4 and -1")
    LOGGER.info("predictions %s", np.round(o.output()))
    LOGGER.info("observations %s", np.round(y))
Example #5
0
def test_remove():
    """Test the graph remove functionality."""
    LOGGER.info("Testing removing a functor.")
    LOGGER.info("Building Graph.")

    tj.tjgraph.clear()

    # One variable plus five functors -> 6 graph nodes in total.
    a = tj.var(5)
    b = tj.sigmoid(a)
    c = tj.sigmoid(b)
    c = tj.sigmoid(c)

    d = c + 5
    d = d + 5

    # Message previously claimed "5 nodes" while asserting 6.
    assert len(tj.tjgraph.nodes) == 6, "Graph should contain 6 nodes "\
        + "Graph contains %s nodes" % len(tj.tjgraph.nodes)

    # Removing the root variable should cascade through its dependants.
    tj.tjgraph.remove(a)

    assert len(tj.tjgraph.nodes) == 1, "Graph should contain 1 monoid "\
        + "Graph contains %s nodes" % len(tj.tjgraph.nodes)

    # Reuse the computed output instead of evaluating twice; the
    # message previously claimed "should be 5" while asserting 10.
    o = d.output()
    assert o == 10, ("Output should be 10 is %s" % o)
Example #6
0
def test_1d_simples():
    """Test trivial gradient updates for add, sub, mul and div.

    Each section runs five gradient-ascent/descent steps on one operand
    and checks the value moved in the expected direction.
    """
    LOGGER.info("Testing Addition.")
    a = tj.var(np.ones(3) * 5)
    b = tj.var(np.ones(3) * 10)

    c = tj.add(a, b)

    # d(a+b)/da = 1, so ascending on a increases it.
    for i in range(5):
        g = tj.gradients(c, [a])
        LOGGER.info("%s *** a: %s -- b: %s -- g: %s -- dt: 0.1" % (i, a, b, g))

        a.update(a.v + g[0] * 1e-1)

    assert _true(a.v > 5.0), "A should be larger than 5 but is %s" % a

    LOGGER.info("Testing Subtraction.")
    a = tj.var(np.ones(3) * 5)
    b = tj.var(np.ones(3) * 10)

    c = tj.sub(a, b)

    # d(a-b)/db = -1, so ascending on b decreases it.
    for i in range(5):
        g = tj.gradients(c, [b])
        LOGGER.info("%s *** a: %s -- b: %s -- g: %s -- dt: 0.1" % (i, a, b, g))

        b.update(b.v + g[0] * 1e-1)

    # Message previously reported `a` while asserting on `b`.
    assert _true(b.v < 10.0), "B should be smaller than 10 but is %s" % b

    LOGGER.info("Testing Multiplication.")
    a = tj.var(np.ones(3) * 5)
    b = tj.var(np.ones(3) * 10)

    c = tj.mul(a, b)

    # d(a*b)/da = b > 0, so ascending on a increases it.
    for i in range(5):
        g = tj.gradients(c, [a])
        LOGGER.info("%s *** a: %s -- b: %s -- g: %s -- dt: 0.1" % (i, a, b, g))

        a.update(a.v + g[0] * 1e-1)

    assert _true(a.v > 5.0), "A should be larger than 5 but is %s" % a

    LOGGER.info("Testing Division.")
    a = tj.var(np.ones(3) * 5)
    b = tj.var(np.ones(3) * 10)

    c = tj.div(a, b)

    # d(a/b)/db = -a/b^2 < 0, so ascending on b decreases it.
    for i in range(5):
        g = tj.gradients(c, [b])
        LOGGER.info("%s *** a: %s -- b: %s -- g: %s -- dt: 0.1" % (i, a, b, g))

        b.update(b.v + g[0] * 1e-1)

    # Message previously reported `a` while asserting on `b`.
    assert _true(b.v < 10.0), "B should be smaller than 10 but is %s" % b
Example #7
0
def test_convex():
    """Test optimising a simple convex function."""
    LOGGER.info("Testing simple convex: x^2")
    # Exercise both scalar-like (size 1) and vector (size 3) variables.
    for size in [1, 3]:
        a = tj.var(np.ones(size))
        b = tj.mul(a, a)

        LOGGER.info("Initial a: %s" % a)

        # 100 gradient-descent steps towards the minimum at 0.
        for _ in range(100):
            grads = tj.gradients(b, [a])
            a.update(a.v - grads[0] * 1e-1)

        LOGGER.info("Final a: %s" % a)

        assert _true(
            abs(a.v) < 1.0), "A should be smaller than 1 but is %s" % a

    LOGGER.info("Testing more complex convex: (x * 5 + 3 - x)^2")

    for size in [1, 3]:
        a = tj.var(np.ones(size))
        b = tj.var(5)

        # Build (a * 5 + 3 - a)^2 step by step.
        expr = tj.mul(a, b)
        expr = tj.add(expr, 3)
        expr = tj.sub(expr, a)
        expr = tj.mul(expr, expr)

        LOGGER.info("Initial a: %s" % a)

        # Smaller learning rate: the quadratic here is much steeper.
        for _ in range(100):
            grads = tj.gradients(expr, [a])
            a.update(a.v - grads[0] * 1e-2)

        LOGGER.info("Final a: %s" % a.v)

        assert _true(
            abs(a.v) < 1.0), "A should be smaller than 1 but is %s" % a
Example #8
0
def test_adding_monoids():
    """See if monoids is added to graph.

    Checks node/variable bookkeeping, custom-name registration and
    graph clearing. Local `vars` renamed to `variables` so it no
    longer shadows the builtin.
    """
    tj.tjgraph.clear()

    LOGGER.info("Checking right number of nodes.")
    a = tj.var(np.random.rand(1, 5))
    b = tj.var(np.random.rand(1, 5))

    c = tj.add(a, b, name="first-addition")

    nodes = tj.tjgraph.get_nodes()
    variables = tj.tjgraph.get_variables()

    # Two variables plus one add functor.
    assert len(nodes) == 3, "Nodes should be 3 is %s" % len(nodes)
    assert len(variables) == 2, "Vars should be 2 is %s" % len(variables)

    LOGGER.info("Checking names.")
    var_names = [v.name for v in variables]
    node_names = [n.name for n in nodes]
    assert "first-addition" in node_names,\
        "first-addition should be in %s" % node_names

    assert "first-addition" not in var_names,\
        "first-addition should not be in %s" % var_names

    d = tj.mul(c, b, name="first-multiplication")

    nodes = tj.tjgraph.get_nodes()
    node_names = [n.name for n in nodes]

    LOGGER.info("Checking more right number of nodes.")
    assert len(nodes) == 4, "Nodes should be 4 is %s" % len(nodes)
    assert "first-addition" in node_names,\
        "first-addition should be in %s" % node_names

    tj.tjgraph.clear()

    nodes = tj.tjgraph.get_nodes()
    variables = tj.tjgraph.get_variables()

    LOGGER.info("Testing clearing")
    assert len(nodes) == 0, "Nodes should be 0 is %s" % len(nodes)
    assert len(variables) == 0, "Vars should be 0 is %s" % len(variables)

    # Rebuild a larger graph: 2 variables + 7 functors = 9 nodes.
    a = tj.var(np.random.rand(1, 5))
    b = tj.var(np.random.rand(1, 5))

    c = tj.add(a, b)
    c = tj.add(c, a)
    c = tj.add(c, b)
    d = tj.mul(c, b, name="cookie")
    e = tj.sub(c, d)
    f = tj.div(d, e, name="milk")
    f = tj.add(f, f)

    nodes = tj.tjgraph.get_nodes()
    variables = tj.tjgraph.get_variables()

    var_names = [v.name for v in variables]
    node_names = [n.name for n in nodes]

    LOGGER.info("Test adding a lot of monoids")
    assert len(nodes) == 9, "Nodes should be 9 is %s" % len(nodes)
    assert len(variables) == 2, "Vars should be 2 is %s" % len(variables)

    LOGGER.info("Checking names")
    assert "cookie" in node_names,\
        "cookie should be in %s" % node_names

    assert "cookie" not in var_names,\
        "cookie should not be in %s" % var_names

    assert "milk" in node_names,\
        "milk should be in %s" % node_names

    assert "milk" not in var_names,\
        "milk should not be in %s" % var_names

    tj.tjgraph.clear()

    nodes = tj.tjgraph.get_nodes()
    variables = tj.tjgraph.get_variables()

    LOGGER.info("Testing clearing")
    assert len(nodes) == 0, "Nodes should be 0 is %s" % len(nodes)
    assert len(variables) == 0, "Vars should be 0 is %s" % len(variables)
Example #9
0
def test_cache():
    """Test the cache functionality of the graph.

    Runs the same logistic-regression optimisation three times
    (uncached, cached, uncached again), then times output evaluation
    on a deep graph with and without caching. The no-op bare-string
    separator statements were converted to comments.
    """
    LOGGER.info("Testing cache.")

    LOGGER.info("Running logistic regression test.")

    x = np.random.rand(10) - 0.5
    y = sigmoid(4 * x - 1)

    a = tj.var(np.random.rand())
    b = tj.var(np.random.rand())

    o = tj.sigmoid(a * x + b)
    err = tj.mse(y, o)

    LOGGER.info("Optimizing a simple logistic regression.")
    LOGGER.info(" X domain is (10, 2)")
    LOGGER.info(" Y domain is (10, 1)")

    LOGGER.info("before training: coefficient %s -- bias: %s -- mse: %s" %
                (a, b, err.output()))

    opt = tj.opt.gd(err)
    opt.dt = 1e-0
    opt.rounds = 5000

    opt.minimise([a, b])

    LOGGER.info("after training: coefficient %s -- bias: %s -- mse: %s" %
                (a, b, err.output()))

    # ------------

    LOGGER.info("Enabling cache and running the same calculations.")

    tj.tjgraph.cache()

    # Re-randomise the weights so training starts from scratch.
    a.update(np.random.rand())
    b.update(np.random.rand())

    LOGGER.info("before training: coefficient %s -- bias: %s -- mse: %s" %
                (a, b, err.output()))

    opt = tj.opt.gd(err)
    opt.dt = 1e-0
    opt.rounds = 5000

    opt.minimise([a, b])

    LOGGER.info("after training: coefficient %s -- bias: %s -- mse: %s" %
                (a, b, err.output()))

    # ------------

    LOGGER.info("Disableing cache and running the same calculations.")

    tj.tjgraph.no_cache()

    a.update(np.random.rand())
    b.update(np.random.rand())

    LOGGER.info("before training: coefficient %s -- bias: %s -- mse: %s" %
                (a, b, err.output()))

    opt = tj.opt.gd(err)
    opt.dt = 1e-0
    opt.rounds = 5000

    opt.minimise([a, b])

    LOGGER.info("after training: coefficient %s -- bias: %s -- mse: %s" %
                (a, b, err.output()))

    # ------------

    LOGGER.info("Testing if cache makes a difference performance wise.")

    # Build a long chain graph: 2000 iterations of c = a + b + c.
    a = tj.var(np.random.rand())
    b = tj.var(np.random.rand())

    # Deep graphs recurse during evaluation; raise the limit first.
    sys.setrecursionlimit(5000)

    timestamp = time.time()
    c = a + b
    for _ in range(2000):
        c = a + b + c

    LOGGER.info("Making graph with %s ops took %s seconds" %
                (3 * 2000, time.time() - timestamp))

    iters = 200

    # Baseline: repeated evaluation without the cache.
    timestamp = time.time()
    for _ in range(iters):
        c.output()

    LOGGER.info("Running %s iters without cache took %s seconds" %
                (iters, time.time() - timestamp))

    timestamp = time.time()
    tj.tjgraph.cache()

    LOGGER.info("Cacheing graph took %s seconds" % (time.time() - timestamp))

    # Cached evaluation of an unchanged graph should be fast.
    timestamp = time.time()
    for _ in range(iters):
        c.output()

    LOGGER.info("Running %s iters with cache took %s seconds" %
                (iters, time.time() - timestamp))

    # Updating a variable each round forces partial re-computation.
    timestamp = time.time()
    for _ in range(iters):
        a.update(np.random.rand())
        c.output()

    LOGGER.info("Running %s iters with cache and update took %s seconds" %
                (iters, time.time() - timestamp))

    tj.tjgraph.no_cache()

    timestamp = time.time()
    for _ in range(iters):
        a.update(np.random.rand())
        c.output()

    LOGGER.info("Running %s iters with no cache and update took %s seconds" %
                (iters, time.time() - timestamp))
Example #10
0
def test_adding_variables():
    """See if variables is added to graph.

    Checks variable registration, named lookup, clearing, and that
    name collisions are resolved automatically. Local `vars` renamed
    to `variables` so it no longer shadows the builtin.
    """
    LOGGER.info("Adding variables.")
    tj.tjgraph.clear()
    tj.var(np.random.rand())
    tj.var(np.random.rand(1, 10))
    tj.var(np.random.rand(5, 5))
    tj.var(np.random.rand(), name="Hello")

    LOGGER.info("Getting variables.")
    variables = tj.tjgraph.get_variables()
    assert len(variables) == 4,\
        "Variables should be len 4 not %s" % len(variables)
    assert "Hello" in [v.name for v in variables],\
        "Hello should be in vars found: %s" % [v.name for v in variables]

    LOGGER.info("Clearing variables.")
    tj.tjgraph.clear()

    variables = tj.tjgraph.get_variables()
    assert len(variables) == 0,\
        "Cleared graph contained %s vars" % len(variables)

    tj.var(np.random.rand())
    tj.var(np.random.rand(1, 10))
    tj.var(np.random.rand(5, 5))
    tj.var(np.random.rand(), name="Hello")

    LOGGER.info("Getting specific variable.")
    variables = tj.tjgraph.get_variables(names=["Hello"])
    assert len(variables) == 1,\
        "Should only be on variable named 'Hello' found %s"\
        % [v.name for v in variables]

    assert "Hello" in [v.name for v in variables],\
        "'Hello' should be in %s" % [v.name for v in variables]

    # Name collisions must be resolved by the graph (e.g. a suffix).
    LOGGER.info("Create two variables with same name: 'Hi' and 'Hi'")
    v1 = tj.var(np.random.rand(), name="Hi")
    v2 = tj.var(np.random.rand(), name="Hi")

    assert v1.name != v2.name,\
        "name collision %s and %s" % (v1.name, v2.name)

    LOGGER.info("Name after creation: %s and %s" % (v1.name, v2.name))

    tj.tjgraph.clear()