Example #1
def test_invalid_fetches():
    with pm.Node():
        a = pm.placeholder()
    graph = pm.Node()

    with pytest.raises(RuntimeError):
        graph(a)

    with pytest.raises(KeyError):
        graph('a')

    with pytest.raises(ValueError):
        graph(123)
Example #2
def reco(m_=3, n_=3, k_=2):
    with pm.Node(name="recommender") as graph:
        mu = pm.placeholder("mu")
        m = pm.placeholder("m")
        n = pm.placeholder("n")
        k = pm.placeholder("k")
        x1 = pm.placeholder("x1", shape=k)
        x2 = pm.placeholder("x2", shape=k)

        r1 = pm.placeholder("r1", shape=m)
        y1 = pm.placeholder("y1", shape=m)

        r2 = pm.placeholder("r2", shape=n)
        y2 = pm.placeholder("y2", shape=n)

        w1 = pm.placeholder("w1", shape=(m, k))
        w2 = pm.placeholder("w2", shape=(n, k))
        i = pm.index(0, m - 1, name="i")
        j = pm.index(0, n - 1, name="j")
        l = pm.index(0, k - 1, name="l")
        h1_sum = pm.sum([l], (w1[i, l] * x2[l]).set_name("w1*x2")).set_name("h1_sum")
        h1 = (h1_sum[i] * r1[i]).set_name("h1")
        h2_sum = pm.sum([l], (x1[l] * w2[j, l]).set_name("x1*w2")).set_name("h2_sum")
        h2 = (h2_sum[j] * r2[j]).set_name("h2")
        # errors, gradients, and weight updates
        d1 = (h1[i] - y1[i]).set_name("d1")
        d2 = (h2[j] - y2[j]).set_name("d2")
        g1 = (d1[i] * x2[l]).set_name("g1")
        g2 = (d2[j] * x1[l]).set_name("g2")
        w1_ = (w1[i, l] - g1[i, l]).set_name("w1_")
        w2_ = (w2[j, l] - g2[j, l]).set_name("w2_")  # index with j: w2 and g2 range over n, not m

    shape_val_pass = pm.NormalizeGraph({"m": m_, "n": n_, "k": k_})
    new_graph, res = shape_val_pass(graph)
    return new_graph
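
The builder above only constructs and normalizes the graph. Below is a minimal usage sketch (an assumption, not part of the original test suite) that evaluates the "w1_" update with coarse numpy inputs keyed by the placeholder names declared in reco(), following the same call pattern the linear-regression examples use later in this collection; it assumes the same pm/numpy imports as the surrounding tests.

def run_reco_sketch(m_=3, n_=3, k_=2):
    # Hypothetical driver; every key below matches a placeholder name in reco().
    graph = reco(m_=m_, n_=n_, k_=k_)
    input_dict = {
        "mu": 1.0,
        "x1": np.random.rand(k_), "x2": np.random.rand(k_),
        "r1": np.random.randint(0, 2, m_), "y1": np.random.rand(m_),
        "r2": np.random.randint(0, 2, n_), "y2": np.random.rand(n_),
        "w1": np.random.rand(m_, k_), "w2": np.random.rand(n_, k_),
    }
    return graph("w1_", input_dict)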
Example #3
def test_unary_operators(value, operator):
    expected = eval('%s value' % operator)
    with pm.Node() as graph:
        operation = eval('%s pm.parameter(default=value)' % operator)
    actual = graph(operation)
    assert actual == expected, "expected %s %s = %s but got %s" % \
        (operator, value, expected, actual)
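
test_unary_operators receives (value, operator) from a parametrization that is not included in this collection. A plausible pytest parametrization is sketched below (an assumption; the operator and value lists are guesses, and the same pattern presumably supplies the arguments of the other parameterized tests here, such as test_strided_index and test_transpose).

import pytest

# Hypothetical parametrization: the original fixture lists are not shown in this collection.
@pytest.mark.parametrize('operator', ['-', '+'])
@pytest.mark.parametrize('value', [3, -2, 1.5])
def test_unary_operators(value, operator):
    expected = eval('%s value' % operator)
    with pm.Node() as graph:
        operation = eval('%s pm.parameter(default=value)' % operator)
    actual = graph(operation)
    assert actual == expected, "expected %s %s = %s but got %s" % \
        (operator, value, expected, actual)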
Example #4
def test_flatten_result_length():
    with pm.Node(name="linear_reg") as graph:
        m = pm.placeholder("m", type_modifier="param")
        x = pm.placeholder("x", shape=(m), type_modifier="input")
        y = pm.placeholder("y", type_modifier="input")
        w = pm.placeholder("w", shape=(m), type_modifier="state")
        mu = pm.placeholder("mu", default_val=1.0, type_modifier="param")
        i = pm.index(0, (m - 1).set_name("m-1")).set_name("i")
        h = pm.sum([i], (x[i] * w[i]).set_name("x*w"), name="h")
        d = (h - y).set_name("h-y")
        g = (d * x[i]).set_name("d*x")
        w_ = (w[i] - (mu * g[i]).set_name("mu*g")).set_name("w_out")

    shape_val_pass = NormalizeGraph({"m": 3})
    count_pass = CountNodes()
    flatten_pass = Lower({})

    new_graph = shape_val_pass(graph)

    flattened_g = flatten_pass(new_graph)
    x = np.random.randint(0, 10, 10)
    y = np.random.randint(0, 10, 1)[0]
    w = np.random.randint(0, 10, 10)

    orig_graph = count_pass(flattened_g)
Example #5
def test_single_dim_norm():
    with pm.Node(name="elem1") as graph:
        m = pm.parameter("m")
        x = pm.input("x", shape=m)
        w = pm.state("w", shape=m)
        i = pm.index(0, m - 1, name="i")
        w[i] = (w[i] * x[i])
    x_ = np.random.randint(0, 10, 3)
    w_ = np.random.randint(0, 10, 3)
    coarse_eval = graph("w", x=x_, w=w_)

    np_result = x_ * w_
    np.testing.assert_allclose(coarse_eval, np_result)
    shape_pass = NormalizeGraph({"m": 3})
    graph_shapes = shape_pass(graph)

    shape_res = graph_shapes("w", x=x_, w=w_)
    np.testing.assert_allclose(shape_res, np_result)
    lower_pass = Lower({})
    lowered_graph = lower_pass(graph_shapes)
    input_info = {f"w/w({i},)": w_[i] for i in range(len(w_))}
    input_info.update({f"x/x({i},)": x_[i] for i in range(len(x_))})
    fine_grained_eval = lowered_graph("w/w(1,)", input_info)

    assert fine_grained_eval == np_result[1]

    pb_path = f"{OUTPATH}/{graph.name}.srdfg"
    pm.pb_store(lowered_graph, OUTPATH)
    loaded_node = pm.pb_load(pb_path)
    input_info = {f"w/w({i},)": w_[i] for i in range(len(w_))}
    input_info.update({f"x/x({i},)": x_[i] for i in range(len(x_))})
    fine_grained_eval = loaded_node("w/w(1,)", input_info)
    assert fine_grained_eval == np_result[1]
Example #6
def test_linear_deserialize():

    graph_name = "linear_reg1"
    with pm.Node(name=graph_name) as graph:
        m = pm.placeholder("m")
        x_ = pm.placeholder("x", shape=(m))
        y_ = pm.placeholder("y")
        w_ = pm.placeholder("w", shape=(m))
        mu = pm.parameter(name="mu", default=1.0)
        i = pm.index(0, (m - 1).set_name("m-1"), name="i")
        h = pm.sum([i], (x_[i] * w_[i]).set_name("x*w"), name="h")
        d = (h - y_).set_name("h-y")
        g = (d * x_[i]).set_name("d*x")
        mug = (mu * g[i]).set_name("mu*g[i]")
        w_ = ((w_[i]) - mug).set_name("w_out")
    x = np.random.randint(0, 10, 10)
    y = np.random.randint(0, 10, 1)[0]
    w = np.random.randint(0, 10, 10)

    graph_res = graph("w_out", {"x": x, "y": y, "w": w})
    actual_res = w - ((np.sum(x * w) - y) * x) * 1.0

    np.testing.assert_allclose(graph_res, actual_res)
    cwd = Path(f"{__file__}").parent
    base_path = f"{cwd}/pmlang_examples"
    full_path = f"{base_path}/outputs"
    pb_path = f"{full_path}/{graph_name}.srdfg"
    pm.pb_store(graph, full_path)
    node = pm.pb_load(pb_path)
    new_graph_res = node("w_out", {"x": x, "y": y, "w": w})
    np.testing.assert_allclose(graph_res, new_graph_res)
    np.testing.assert_allclose(actual_res, new_graph_res)
Example #7
def test_multi_dim_op_slice():
    with pm.Node(name="elem2") as graph:
        m = pm.parameter(name="m")
        n = pm.parameter(name="n")
        mu = pm.parameter(name="mu", default=2.0)
        x = pm.input(name="x", shape=(m, n))
        w = pm.state(name="w", shape=(m, n))
        i = pm.index(0, m - 1, name="i")
        j = pm.index(0, n - 1, name="j")
        out = (x[i, j] * w[i, j]).set_name("w_out")
        w[i, j] = (mu * (out[i, j] - w[i, j]))
    m_ = 3
    n_ = 2
    x_ = np.random.randint(0, 10, m_ * n_).reshape((m_, n_))
    w_ = np.random.randint(0, 10, m_ * n_).reshape((m_, n_))
    coarse_eval = graph("w", x=x_, w=w_)
    np_result = (x_ * w_ - w_) * 2.0
    np.testing.assert_allclose(coarse_eval, np_result)
    shape_pass = NormalizeGraph({"m": m_, "n": n_})
    graph_shapes = shape_pass(graph)
    shape_res = graph_shapes("w", x=x_, w=w_)
    np.testing.assert_allclose(shape_res, np_result)
    lower_pass = Lower({})
    lowered_graph = lower_pass(graph_shapes)
    input_info = {}
    for i in range(m_):
        for j in range(n_):
            input_info[f"w/w({i}, {j})"] = w_[i, j]
            input_info[f"x/x({i}, {j})"] = x_[i, j]
    fine_grained_eval = lowered_graph("w/w(2, 1)", input_info)
    assert fine_grained_eval == np_result[2, 1]
Example #8
def test_multi_dim():
    with pm.Node(name="elem4") as graph:
        m = pm.parameter(name="m")
        n = pm.parameter(name="n")
        x = pm.input("x", shape=(m, n))
        w = pm.state("w", shape=(m, n))
        i = pm.index(0, m - 1, name="i")
        j = pm.index(0, n - 1, name="j")
        w[i, j] = (w[i, j] * x[i, j])
    m_ = 3
    n_ = 4
    x_ = np.random.randint(0, 10, m_ * n_).reshape((m_, n_))
    w_ = np.random.randint(0, 10, m_ * n_).reshape((m_, n_))
    coarse_eval = graph("w", x=x_, w=w_)
    np_result = x_ * w_
    np.testing.assert_allclose(coarse_eval, np_result)
    shape_pass = NormalizeGraph({"m": m_, "n": n_})
    graph_shapes = shape_pass(graph)
    shape_res = graph_shapes("w", x=x_, w=w_)
    np.testing.assert_allclose(shape_res, np_result)
    lower_pass = Lower({})
    lowered_graph = lower_pass(graph_shapes)
    input_info = {}
    for i in range(m_):
        for j in range(n_):
            input_info[f"w/w({i}, {j})"] = w_[i, j]
            input_info[f"x/x({i}, {j})"] = x_[i, j]

    fine_grained_eval = lowered_graph("w/w(2, 3)", input_info)
    assert fine_grained_eval == np_result[2, 3]
Example #9
def test_multidim_sigmoid(m_):

    with pm.Node(name="logistic") as graph:
        m = pm.parameter(name="m")
        n = pm.parameter(name="n")
        x = pm.input("x", shape=(m))
        w = pm.state("w", shape=(m))
        i = pm.index(0, m - 1, name="i")
        o = pm.sigmoid(w[i] * x[i], name="out")
    x_ = np.random.randint(0, 10, m_).astype(np.float64)
    w_ = np.random.randint(0, 10, m_).astype(np.float64)
    shape_dict = {"m": m_}
    input_dict = {"x": x_, "w": w_}
    np_res = sigmoid((x_ * w_))

    coarse_eval = graph("out", input_dict)
    np.testing.assert_allclose(np_res, coarse_eval)
    lowered = set_shape_and_lower(graph, shape_dict)
    keys = [f"out/out({i},)" for i in range(m_)]

    x_ = np.random.randint(0, 10, m_).astype(np.float64)
    w_ = np.random.randint(0, 10, m_).astype(np.float64)
    input_dict = {}
    for i in range(m_):
        input_dict[f"x/x({i},)"] = x_[i]
        input_dict[f"w/w({i},)"] = w_[i]
    np_res = sigmoid((x_ * w_))

    lower_res = np.asarray(lowered(keys, input_dict)).reshape(np_res.shape)
    np.testing.assert_allclose(lower_res, np_res)
Example #10
def test_single_dim_op_slice():
    with pm.Node(name="elem3") as graph:
        m = pm.parameter(name="m")
        x = pm.input("x", shape=m)
        w = pm.state("w", shape=m)
        i = pm.index(0, m - 1, name="i")
        out = (w[i] * x[i])
        w[i] = (out[i] - w[i])

    m_ = 3
    x_ = np.random.randint(0, 10, m_)
    w_ = np.random.randint(0, 10, m_)

    coarse_eval = graph("w", x=x_, w=w_)
    np_result = x_ * w_ - w_
    np.testing.assert_allclose(coarse_eval, np_result)

    shape_pass = NormalizeGraph({"m": 3})
    graph_shapes = shape_pass(graph)
    shape_res = graph_shapes("w", x=x_, w=w_)

    np.testing.assert_allclose(shape_res, np_result)
    lower_pass = Lower({})
    lowered_graph = lower_pass(graph_shapes)
    input_info = {f"w/w({i},)": w_[i] for i in range(len(w_))}
    input_info.update({f"x/x({i},)": x_[i] for i in range(len(x_))})
    fine_grained_eval = lowered_graph("w/w(2,)", input_info)
    assert fine_grained_eval == np_result[2]
Example #11
def test_dict():
    expected = 13
    with pm.Node() as graph:
        a = pm.parameter(default=expected)
        b = pm.identity({'foo': a})
    actual = graph(b)['foo']
    assert actual is expected, "expected %s but got %s" % (expected, actual)
Example #12
def test_list():
    expected = 13
    with pm.Node() as graph:
        a = pm.parameter(default=expected)
        b = pm.identity([a, a])
    actual, _ = graph(b)
    assert actual is expected, "expected %s but got %s" % (expected, actual)
Example #13
def test_multi_shapes():
    m_ = 5
    n_ = 4
    p_ = 3
    inp_ = np.random.randint(1, 5, (m_, p_))
    w_ = np.random.randint(1, 5, (p_, n_))
    mapping = {"m": m_, "n": n_, "p": p_, "in": inp_, "w": w_}

    numpy_res1 = np.empty(shape=(m_, p_, n_))
    indices = []
    for i in range(m_):
        for k in range(p_):
            for j in range(n_):
                numpy_res1[i][k][j] = inp_[i][k] * w_[k][j]
                indices.append(tuple([i, k, j]))
    numpy_res = np.sum(numpy_res1)

    with pm.Node(name="mmul") as graph:
        m = pm.placeholder("m")
        n = pm.placeholder("n")
        p = pm.placeholder("p")
        inp = pm.placeholder("in", shape=(m, p))
        wts = pm.placeholder("w", shape=(p, n))
        i = pm.index(0, m - 1, name="i")
        j = pm.index(0, n - 1, name="j")
        k = pm.index(0, p - 1, name="k")
        inp_ik = pm.var_index(inp, [i, k], name="in[i,k]")
        w_kj = pm.var_index(wts, [k, j], name="w[k,j]")
        slice_mul = (inp_ik * w_kj).set_name("w[i,k]*in[k,j]")
        out = pm.sum([i, k, j], slice_mul, name="out")
    graph_res = graph("out", mapping)
    assert graph_res == numpy_res
Example #14
def test_context():
    with pm.Node() as graph:
        a = pm.placeholder(name='a')
        b = pm.placeholder(name='b')
        c = pm.placeholder(name='c')
        x = a * b + c
    actual = graph(x, {a: 4, 'b': 7}, c=9)
    assert actual == 37
Example #15
def test_contains():
    with pm.Node() as graph:
        test = pm.placeholder()
        alphabet = pm.variable('abc')
        contains = pm.contains(alphabet, test)

    assert graph(contains, {test: 'a'})
    assert not graph(contains, {test: 'x'})
Example #16
def test_try_not_caught():
    with pm.Node() as graph:
        a = pm.placeholder()
        b = pm.placeholder()
        c = pm.try_(a / b, [(ValueError, 'value-error')])

    with pytest.raises(ZeroDivisionError):
        graph(c, {a: 1, b: 0})
Example #17
def test_assert_with_value():
    with pm.Node() as graph:
        x = pm.placeholder(name='x')
        assertion = pm.assert_(x < 10, val=2 * x)

    assert graph(assertion, x=9) == 18
    with pytest.raises(AssertionError):
        graph(assertion, x=11)
Example #18
def test_binary_operators_right(binary_operators):
    operator, a, b, expected = binary_operators
    with pm.Node() as graph:
        _b = pm.parameter(default=b)
        operation = eval('a %s _b' % operator)

    actual = graph(operation)
    assert actual == expected, "expected %s %s %s == %s but got %s" % \
        (a, operator, b, expected, actual)
Example #19
def test_duplicate_value():
    with pm.Node() as graph:
        a = pm.placeholder('a')

    with pytest.raises(ValueError):
        graph([], {a: 1}, a=1)

    with pytest.raises(ValueError):
        graph([], {a: 1, 'a': 1})
Example #20
def test_strided_index(lbound, ubound, stride):

    with pm.Node(name="strided") as graph:
        idx = pm.index(lbound, ubound - 1, stride=stride, name="i")

    ref = np.arange(lbound, ubound, stride)
    res = graph("i", {})

    np.testing.assert_allclose(ref, res)
Example #21
def linear_reg_graph_mg():
    graph_name = "linear_reg"
    with pm.Node(name="linear_reg") as graph:
        m = pm.placeholder("m")
        x_ = pm.placeholder("x", shape=(m), type_modifier="input")
        y_ = pm.placeholder("y", type_modifier="input")
        w_ = pm.placeholder("w", shape=(m), type_modifier="state")
        i = pm.index(0, m - 1, name="i")
        h = pm.sum([i], (x_[i] * w_[i]).set_name("x*w"), name="h")
        d = (h - y_).set_name("h-y")
        g = (d * x_[i]).set_name("d*x")
        with pm.Node(name="grad_update") as graph2:
            mu = pm.parameter(name="mu", default=1.0)
            p1 = mu * g[i]
            p2 = w_[i]
            w_prime = (p2 - p1).set_name("res1")
        tout = (w_prime * 1.0).set_name("res")
    return graph
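
A hedged usage sketch for the builder above (assumed, not part of the original file): it evaluates the final "res" node with random inputs, reusing the call pattern from test_linear_deserialize.

graph = linear_reg_graph_mg()
x = np.random.randint(0, 10, 5)
y = np.random.randint(0, 10, 1)[0]
w = np.random.randint(0, 10, 5)
# "res" names the (w_prime * 1.0) node defined in the outer graph above.
res = graph("res", {"x": x, "y": y, "w": w})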
Example #22
def test_transpose(in_shape):
    x = np.random.randint(0, 30, np.prod(in_shape)).reshape(in_shape)

    with pm.Node(name="tpose") as graph:
        x_pm = pm.input(name="x", shape=in_shape)
        pm.transpose(x_pm, (1,0), name="o")

    in_dict = {"x": x}
    res = graph("o", in_dict)
    np.testing.assert_allclose(x.T, res)
Example #23
def test_flip(in_shape, axis):
    x = np.random.randn(*in_shape).astype(np.float32)

    with pm.Node(name="flip_op") as graph:
        data = pm.input(name="input", shape=x.shape)
        out = pm.flip(data, axis, name="res")

    np_y = np.flip(x, axis)
    pm_y = graph("res", {"input": x})
    np.testing.assert_allclose(np_y, pm_y)
Example #24
def test_conditional():
    with pm.Node() as graph:
        x = pm.parameter(default=4)
        y = pm.placeholder(name='y')
        condition = pm.placeholder(name='condition')
        z = pm.predicate(condition, x, y)

    assert graph(z, condition=False, y=5) == 5
    # We expect a value error if we evaluate the other branch without a placeholder
    with pytest.raises(ValueError):
        print(graph(z, condition=False))
Example #25
def test_try(context, expected):
    finally_reached = []

    with pm.Node() as graph:
        a = pm.placeholder('a')
        b = pm.placeholder('b')
        c = pm.try_(a / b, [(ZeroDivisionError, 'zero-division')],
                    pm.func_op(lambda: finally_reached.append('done')))

    assert graph(c, context) == expected
    assert finally_reached
Example #26
def test_reshape(in_shape, out_shape):
    x = np.zeros(in_shape).astype(np.float32)

    with pm.Node(name="reshape_op") as graph:
        data = pm.input(name="input", shape=x.shape)
        out = pm.reshape(data, out_shape, name="res")

    pm_y = graph("res", {"input": x})
    np_y = np.reshape(x, out_shape)
    np.testing.assert_allclose(np_y, pm_y)
    assert np_y.shape == pm_y.shape
Example #27
def test_stack_trace():
    with pm.Node() as graph:
        a = pm.placeholder()
        b = pm.placeholder()
        c = a / b

    try:
        graph(c, {a: 1, b: 0})
        raise RuntimeError("did not raise ZeroDivisionError")
    except ZeroDivisionError as ex:
        assert isinstance(ex.__cause__, pm.EvaluationError)
Example #28
def linear_reg():
    with pm.Node(name="linear_reg") as graph:
        m = pm.placeholder("m")
        x = pm.placeholder("x", shape=(m), type_modifier="input")
        y = pm.placeholder("y", type_modifier="input")
        w = pm.placeholder("w", shape=(m), type_modifier="state")
        mu = pm.parameter(name="mu", default=1.0)
        i = pm.index(0, (graph["m"]-1).set_name("m-1"), name="i")
        h = pm.sum([i], (x[i] * w[i]).set_name("x*w"), name="h")
        d = (h-y).set_name("h-y")
        g = (d*x).set_name("d*x")
        w_ = (w - (mu*g).set_name("mu*g")).set_name("w-mu*g")
    return graph
Example #29
def test_flatten():
    shape = (2, 3, 4, 5)
    a = np.random.random_sample(shape).astype(np.float32)
    for i in range(len(shape)):
        with pm.Node(name="flatten_op") as graph:
            x = pm.state("x", shape=shape)
            x_us = pm.flatten(x, axis=i, name="res")

        new_shape = (1, -1) if i == 0 else (np.prod(shape[0:i]).astype(int), -1)
        b = np.reshape(a, new_shape)
        pm_b = graph("res", {"x": a})
        np.testing.assert_allclose(pm_b, b)
Example #30
def test_second():
    test_a = np.array([1, 2, 3, 4])
    test_b = np.array([5, 6, 7, 8])
    with pm.Node(name="main") as graph:
        a = pm.parameter(default=6, name="a")
        b = pm.parameter(default=5, name="b")
        a = (a + b).set_name("a_mul_b")
        with pm.Node(name="graph2") as graph2:
            n = pm.placeholder("n")
            b = pm.placeholder("b")
            e = pm.parameter(default=6, name="e")
            l = pm.state("test", shape=(n, b))
            i = pm.index(0, graph2["n"] - 1)
            j = pm.index(0, graph2["b"] - 1)
            lij = pm.var_index(l, [i, j], "lij")

            x = (l * e).set_name("placeholdermult")

        _ = graph2("test", {l: np.arange(16).reshape((-1, 4))})
        _ = graph2("lij", {l: np.arange(16).reshape((-1, 4))})
        _ = graph2("placeholdermult", {l: np.arange(16).reshape((-1, 4))})