Example No. 1
def reco(m_=3, n_=3, k_=2):
    with pm.Node(name="recommender") as graph:
        mu = pm.placeholder("mu")
        m = pm.placeholder("m")
        n = pm.placeholder("n")
        k = pm.placeholder("k")
        x1 = pm.placeholder("x1", shape=k)
        x2 = pm.placeholder("x2", shape=k)

        r1 = pm.placeholder("r1", shape=m)
        y1 = pm.placeholder("y1", shape=m)

        r2 = pm.placeholder("r2", shape=n)
        y2 = pm.placeholder("y2", shape=n)

        w1 = pm.placeholder("w1", shape=(m, k))
        w2 = pm.placeholder("w2", shape=(n, k))
        i = pm.index(0, m - 1, name="i")
        j = pm.index(0, n - 1, name="j")
        l = pm.index(0, k - 1, name="l")
        h1_sum = pm.sum([l], (w1[i, l] * x2[l]).set_name("w1*x2")).set_name("h1_sum")
        h1 = (h1_sum[i] * r1[i]).set_name("h1")
        h2_sum = pm.sum([l], (x1[l] * w2[j, l]).set_name("x1*w2")).set_name("h2_sum")
        h2 = (h2_sum[j] * r2[j]).set_name("h2")
        #
        d1 = (h1[i] - y1[i]).set_name("d1")
        d2 = (h2[j] - y2[j]).set_name("d2")
        g1 = (d1[i] * x2[l]).set_name("g1")
        g2 = (d2[j] * x1[l]).set_name("g2")
        w1_ = (w1[i, l] - g1[i, l]).set_name("w1_")
        w2_ = (w2[j, l] - g2[j, l]).set_name("w2_")  # w2 is (n, k), so index with j, not i

    shape_val_pass = pm.NormalizeGraph({"m": m_, "n": n_, "k": k_})
    new_graph, res = shape_val_pass(graph)
    return new_graph
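Below is a minimal usage sketch (my addition, not part of the source): it builds the normalized graph and evaluates the updated factors by node name, assuming `import polymath as pm` and `import numpy as np` as in the tests later in this listing, and that a normalized graph is callable by output name the way the test examples call theirs. The feed keys match the placeholder names above.

graph = reco(m_=3, n_=3, k_=2)
feed = {
    "x1": np.random.rand(2), "x2": np.random.rand(2),
    "r1": np.ones(3), "y1": np.random.rand(3),
    "r2": np.ones(3), "y2": np.random.rand(3),
    "w1": np.random.rand(3, 2), "w2": np.random.rand(3, 2),
}
w1_updated = graph("w1_", feed)  # evaluate the updated factors for w1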
Example No. 2
def test_multi_shapes():
    m_ = 5
    n_ = 4
    p_ = 3
    inp_ = np.random.randint(1, 5, (m_, p_))
    w_ = np.random.randint(1, 5, (p_, n_))
    mapping = {"m": m_, "n": n_, "p": p_, "in": inp_, "w": w_}

    numpy_res1 = np.empty(shape=(m_, p_, n_))
    indices = []
    for i in range(m_):
        for k in range(p_):
            for j in range(n_):
                numpy_res1[i][k][j] = inp_[i][k] * w_[k][j]
                indices.append(tuple([i, k, j]))
    numpy_res = np.sum(numpy_res1)

    with pm.Node(name="mmul") as graph:
        m = pm.placeholder("m")
        n = pm.placeholder("n")
        p = pm.placeholder("p")
        inp = pm.placeholder("in", shape=(m, p))
        wts = pm.placeholder("w", shape=(p, n))
        i = pm.index(0, m - 1, name="i")
        j = pm.index(0, n - 1, name="j")
        k = pm.index(0, p - 1, name="k")
        inp_ik = pm.var_index(inp, [i, k], name="in[i,k]")
        w_kj = pm.var_index(wts, [k, j], name="w[k,j]")
        slice_mul = (inp_ik * w_kj).set_name("w[i,k]*in[k,j]")
        out = pm.sum([i, k, j], slice_mul, name="out")
    graph_res = graph("out", mapping)
    assert graph_res == numpy_res
Example No. 3
def test_multi_dim():
    with pm.Node(name="elem4") as graph:
        m = pm.parameter(name="m")
        n = pm.parameter(name="n")
        x = pm.input("x", shape=(m, n))
        w = pm.state("w", shape=(m, n))
        i = pm.index(0, m - 1, name="i")
        j = pm.index(0, n - 1, name="j")
        w[i, j] = (w[i, j] * x[i, j])
    m_ = 3
    n_ = 4
    x_ = np.random.randint(0, 10, m_ * n_).reshape((m_, n_))
    w_ = np.random.randint(0, 10, m_ * n_).reshape((m_, n_))
    coarse_eval = graph("w", x=x_, w=w_)
    np_result = x_ * w_
    np.testing.assert_allclose(coarse_eval, np_result)
    shape_pass = NormalizeGraph({"m": m_, "n": n_})
    graph_shapes = shape_pass(graph)
    shape_res = graph_shapes("w", x=x_, w=w_)
    np.testing.assert_allclose(shape_res, np_result)
    lower_pass = Lower({})
    lowered_graph = lower_pass(graph_shapes)
    input_info = {}
    for i in range(m_):
        for j in range(n_):
            input_info[f"w/w({i}, {j})"] = w_[i, j]
            input_info[f"x/x({i}, {j})"] = x_[i, j]

    fine_grained_eval = lowered_graph("w/w(2, 3)", input_info)
    assert fine_grained_eval == np_result[2, 3]
Example No. 4
def test_multi_dim_op_slice():
    with pm.Node(name="elem2") as graph:
        m = pm.parameter(name="m")
        n = pm.parameter(name="n")
        mu = pm.parameter(name="mu", default=2.0)
        x = pm.input(name="x", shape=(m, n))
        w = pm.state(name="w", shape=(m, n))
        i = pm.index(0, m - 1, name="i")
        j = pm.index(0, n - 1, name="j")
        out = (x[i, j] * w[i, j]).set_name("w_out")
        w[i, j] = (mu * (out[i, j] - w[i, j]))
    m_ = 3
    n_ = 2
    x_ = np.random.randint(0, 10, m_ * n_).reshape((m_, n_))
    w_ = np.random.randint(0, 10, m_ * n_).reshape((m_, n_))
    coarse_eval = graph("w", x=x_, w=w_)
    np_result = (x_ * w_ - w_) * 2.0
    np.testing.assert_allclose(coarse_eval, np_result)
    shape_pass = NormalizeGraph({"m": m_, "n": n_})
    graph_shapes = shape_pass(graph)
    shape_res = graph_shapes("w", x=x_, w=w_)
    np.testing.assert_allclose(shape_res, np_result)
    lower_pass = Lower({})
    lowered_graph = lower_pass(graph_shapes)
    input_info = {}
    for i in range(m_):
        for j in range(n_):
            input_info[f"w/w({i}, {j})"] = w_[i, j]
            input_info[f"x/x({i}, {j})"] = x_[i, j]
    fine_grained_eval = lowered_graph("w/w(2, 1)", input_info)
    assert fine_grained_eval == np_result[2, 1]
Example No. 5
    def define_graph(self, a, w, out):
        indices = _get_single_node_indices(a)
        sum_idx = indices[-1]
        o_idx = (pm.index(0, w.shape[0] - 1) if w.shape[-1] == a.shape[-1]
                 else pm.index(0, w.shape[1] - 1))
        w_idx = (o_idx, sum_idx) if w.shape[-1] == a.shape[-1] else (sum_idx, o_idx)
        out_idx = indices[:-1] + (o_idx,)
        out[out_idx] = pm.sum([sum_idx], a[indices] * w[w_idx])
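For intuition, a NumPy equivalence sketch (my addition): with `a` of shape `(m, k)` and `w` stored as `(o, k)`, the template computes `out[m, o] = sum_k a[m, k] * w[o, k]`, i.e. a dense layer `a @ w.T`.

import numpy as np

a = np.random.rand(8, 4)
w = np.random.rand(6, 4)            # weight stored as (out, in)
out = np.einsum("mk,ok->mo", a, w)  # sum over the shared last axis
assert np.allclose(out, a @ w.T)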
Example No. 6
    def define_graph(self,
                     inp,
                     weight,
                     bias,
                     grad,
                     inp_grad,
                     weight_grad,
                     bias_grad,
                     optimizer,
                     optimizer_kwargs,
                     stride=1,
                     pad=0,
                     dilation=1):
        min_sizes = []
        k = len(grad.shape) - 2

        for d in range(k):
            min_sizes.append((grad.shape[d + 2] - 1) * stride - 2 * pad +
                             (weight.shape[-1] - 1) * dilation + 1)

        grad_input_padding = tuple(inp.shape[-k + d] - min_sizes[d]
                                   for d in range(k))
        assert grad_input_padding[0] == grad_input_padding[1]
        pm.conv_transpose_bias(grad,
                               weight,
                               bias,
                               inp_grad,
                               stride=stride,
                               pad=pad,
                               out_pad=grad_input_padding[0])
        inp_indices = tuple(pm.index(0, s - 1) for s in inp.shape)
        grad_indices = tuple(pm.index(0, s - 1) for s in grad.shape)
        weight_indices = tuple(pm.index(0, s - 1) for s in weight.shape)
        inp_transposed = pm.temp(name=f"transposed_{inp.name}",
                                 shape=(inp.shape[1], inp.shape[0],
                                        inp.shape[2], inp.shape[3]))
        grad_transposed = pm.state(name=f"transposed_{grad.name}",
                                   shape=(grad.shape[1], grad.shape[0],
                                          grad.shape[2], grad.shape[3]))
        wgt_grad_transposed = pm.temp(name=f"transposed_{weight.name}",
                                      shape=(weight.shape[1], weight.shape[0],
                                             weight.shape[2], weight.shape[3]))
        pm.tensor_transpose(inp, inp_transposed, perm=(1, 0, 2, 3))
        pm.tensor_transpose(grad, grad_transposed, perm=(1, 0, 2, 3))
        pm.conv(inp_transposed,
                grad_transposed,
                wgt_grad_transposed,
                stride=dilation,
                pad=pad,
                dilation=stride)
        pm.tensor_transpose(wgt_grad_transposed,
                            weight_grad,
                            perm=(1, 0, 2, 3))
        # Weight update
        OPTIMIZERS[optimizer](weight, weight_grad, **optimizer_kwargs)
        pm.reduce_sum(grad, bias_grad)
        OPTIMIZERS[optimizer](bias, bias_grad, **optimizer_kwargs)
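A quick arithmetic sketch (my addition) of the `min_sizes` / output-padding bookkeeping above: a transposed convolution can only reconstruct sizes up to `min_size`, and `out_pad` makes up the difference lost to stride flooring in the forward pass. Concrete numbers, assuming an ordinary forward convolution:

inp_size, k_size, stride, pad, dilation = 6, 3, 2, 0, 1
# forward conv output size (floor division loses one input column here)
grad_size = (inp_size + 2 * pad - dilation * (k_size - 1) - 1) // stride + 1
# smallest input size the transposed conv can recover without out_pad
min_size = (grad_size - 1) * stride - 2 * pad + (k_size - 1) * dilation + 1
out_pad = inp_size - min_size
assert (grad_size, min_size, out_pad) == (2, 5, 1)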
Example No. 7
    def define_graph(self, x, out):
        # indices = tuple([pm.index(0, s - 1) if s > 1 else 0 for s in shape])
        indices = _get_single_node_indices(out, shape=out.shape)
        m = pm.index(0, x.shape[2] - 1)
        n = pm.index(0, x.shape[3] - 1)
        h = x.shape[2]
        w = x.shape[3]
        out[indices] = (1 / (h * w)) * pm.sum([m, n], x[indices[0], indices[1], m, n])
Example No. 8
    def define_graph(self,
                     a,
                     b,
                     y,
                     alpha=1.0,
                     beta=0.0,
                     transA=False,
                     transB=False,
                     strict_shapes=False):
        if strict_shapes:
            assert b.shape[0] == a.shape[1]
            assert len(y.shape) == 0 or y.shape[0] == a.shape[0]
            assert not transA and not transB, (
                f"Strict shape check failed: transA={transA}, transB={transB}")

        if transA:
            i = pm.index(0, a.shape[1] - 1)
            j = pm.index(0, b.shape[0] - 1)
            k = pm.index(0, b.shape[1] - 1)
            y[i, k] = pm.sum([j], a[j, i] * b[j, k])
        elif transB:
            i = pm.index(0, a.shape[0] - 1)
            j = pm.index(0, b.shape[1] - 1)
            k = pm.index(0, b.shape[0] - 1)
            y[i, k] = pm.sum([j], a[i, j] * b[k, j])
        else:
            i = pm.index(0, a.shape[0] - 1)
            j = pm.index(0, b.shape[0] - 1)
            k = pm.index(0, b.shape[1] - 1)
            y[i, k] = pm.sum([j], a[i, j] * b[j, k])
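For reference, the three index patterns expressed in NumPy (my addition); each branch is an ordinary matrix product with one operand stored transposed:

import numpy as np

i_, j_, k_ = 3, 4, 5
a = np.random.rand(i_, j_); b = np.random.rand(j_, k_)
assert np.allclose(np.einsum("ij,jk->ik", a, b), a @ b)      # neither transposed
at = np.random.rand(j_, i_)                                  # a stored as (j, i)
assert np.allclose(np.einsum("ji,jk->ik", at, b), at.T @ b)  # transA branch
bt = np.random.rand(k_, j_)                                  # b stored as (k, j)
assert np.allclose(np.einsum("ij,kj->ik", a, bt), a @ bt.T)  # transB branch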
Example No. 9
def get_concat(*inputs, axis=None, shape=None, name=None, out=None):
    if not out:
        out = pm.output(name=name, shape=shape)
    indices = [pm.index(0, s - 1) if s > 1 else 0 for s in shape]
    for idx, i in enumerate(inputs):
        indices[axis] = pm.index(idx * i.shape[axis],
                                 (idx + 1) * i.shape[axis] - 1)
        j = pm.index(0, i.shape[axis] - 1)
        out[tuple(indices)] = i[tuple(indices[:axis] + [j] +
                                      indices[axis + 1:])]
    return out
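A hypothetical usage sketch (names are mine), assuming `import polymath as pm`; note the helper assumes all inputs share the same extent along `axis`, since the output offsets are computed as `idx * i.shape[axis]`:

with pm.Node(name="concat_example") as graph:
    a = pm.input(name="a", shape=(2, 3))
    b = pm.input(name="b", shape=(2, 3))
    cat = get_concat(a, b, axis=0, shape=(4, 3), name="cat")  # (2+2, 3)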
Example No. 10
    def define_graph(self, data, out):
        out.set_shape(
            (data.shape[0] * data.shape[1] * data.shape[2] * data.shape[3], ))
        m = data.shape[1]
        n = data.shape[2]
        p = data.shape[3]

        i = pm.index(0, data.shape[0] - 1, name="i")
        j = pm.index(0, m - 1, name="j")
        k = pm.index(0, n - 1, name="k")
        l = pm.index(0, p - 1, name="l")
        out[((i * m + j) * n + k) * p + l] = data[i, j, k, l]
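The affine index `((i*m + j)*n + k)*p + l` is exactly C-order (row-major) linearization; a NumPy check (my addition):

import numpy as np

data = np.random.rand(2, 3, 4, 5)
m, n, p = data.shape[1:]
flat = data.reshape(-1)        # C order
i, j, k, l = 1, 2, 3, 4
assert flat[((i * m + j) * n + k) * p + l] == data[i, j, k, l]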
Example No. 11
    def define_graph(self, inp, weight, bias, grad, inp_grad, weight_grad,
                     bias_grad, optimizer, optimizer_kwargs):
        transA = False
        transB = False

        if grad.shape[1] != weight.shape[0]:
            indices = tuple([pm.index(0, s - 1) for s in weight.shape])
            # weight_transposed = pm.temp(name=f"{weight.name}_transposed", shape=(weight.shape[1], weight.shape[0]))
            weight_transposed = pm.state(name=f"{weight.name}_transposed",
                                         shape=(weight.shape[1],
                                                weight.shape[0]))
            weight_transposed[indices[1], indices[0]] = weight[indices]
            pm.gemm_no_bias(grad,
                            weight_transposed,
                            inp_grad,
                            transA=transA,
                            transB=transB,
                            strict_shapes=True)
        else:
            pm.gemm_no_bias(grad,
                            weight,
                            inp_grad,
                            transA=transA,
                            transB=transB,
                            strict_shapes=True)

        if grad.shape[0] != inp.shape[1]:
            indices = tuple([pm.index(0, s - 1) for s in inp.shape])
            # inp_transposed = pm.temp(name=f"{inp.name}_transposed", shape=(inp.shape[1], inp.shape[0]))
            inp_transposed = pm.state(name=f"{inp.name}_transposed",
                                      shape=(inp.shape[1], inp.shape[0]))
            inp_transposed[indices[1], indices[0]] = inp[indices]
            pm.gemm_no_bias(inp_transposed,
                            grad,
                            weight_grad,
                            transA=transA,
                            transB=transB,
                            strict_shapes=True)
        else:
            pm.gemm_no_bias(inp,
                            grad,
                            weight_grad,
                            transA=transA,
                            transB=transB,
                            strict_shapes=True)

        # Weight update
        assert weight_grad.shape == weight.shape

        OPTIMIZERS[optimizer](weight, weight_grad, **optimizer_kwargs)

        pm.reduce_sum(grad, bias_grad)
        OPTIMIZERS[optimizer](bias, bias_grad, **optimizer_kwargs)
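A NumPy sanity sketch (my addition) of the two products formed above for a linear layer `y = x @ w`: the explicit transposes exist only to put operands in the layout `gemm_no_bias` expects.

import numpy as np

x = np.random.rand(8, 4)      # inp: (batch, in_features)
w = np.random.rand(4, 6)      # weight: (in_features, out_features)
grad = np.random.rand(8, 6)   # upstream gradient, same shape as y
inp_grad = grad @ w.T         # (8, 4), matches inp.shape
weight_grad = x.T @ grad      # (4, 6), matches w.shape
bias_grad = grad.sum(axis=0)  # (6,), mirrors pm.reduce_sum(grad, bias_grad)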
Example No. 12
def dilate(var: pm.placeholder, strides, name=None):
    n = len(var.shape)
    assert len(strides) == n
    out_shape = ()
    nz_indices = ()
    shape_idx = ()

    for i in range(n):
        out_shape += ((var.shape[i] - 1) * strides[i] + 1, )
        nz_indices += (pm.index(0, out_shape[i] - 1, stride=strides[i]), )
        shape_idx += (pm.index(0, out_shape[i] - 1), )

    padded = pm.temp(name=name, shape=out_shape)
    # Zero-fill the dilated output, then scatter the input values at the
    # strided positions (assuming polymath pairs the strided LHS indices
    # with the dense RHS indices element-wise).
    padded[shape_idx] = 0
    var_idx = tuple(pm.index(0, var.shape[i] - 1) for i in range(n))
    padded[nz_indices] = var[var_idx]
    return padded
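For reference, the same dilation in NumPy (my addition): values land at the strided positions, zeros everywhere else.

import numpy as np

def dilate_np(arr, strides):
    out_shape = tuple((s - 1) * st + 1 for s, st in zip(arr.shape, strides))
    out = np.zeros(out_shape, dtype=arr.dtype)
    out[tuple(slice(None, None, st) for st in strides)] = arr
    return out

# dilate_np(np.array([[1, 2], [3, 4]]), (2, 2)) ->
# [[1, 0, 2],
#  [0, 0, 0],
#  [3, 0, 4]]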
Example No. 13
    def define_graph(self, x, w, y, y_pred, mu, m):
        i = pm.index(0, (m - 1).set_name("m-1"), name="i")
        h = pm.sigmoid(pm.sum([i], (x[i] * w[i]), name="h"))
        d = (h - y).set_name("h-y")
        g = (d * x[i]).set_name("d*x")
        w[i] = w[i] - mu * g[i]
Example No. 14
    def define_graph(self, data, out, axis=0):
        out.set_shape(data.shape)
        i = pm.index(0, data.shape[axis] - 1, name="i")
        indices = [
            pm.index(0, s - 1, name=f"{data.name}[{i}]")
            for i, s in enumerate(data.shape)
        ]
        indices[axis] = i
        indices = tuple(indices)
        maxes = pm.max([i], data[indices], name="maxes")
        lse_stable = pm.log(pm.sum([i], pm.exp(data[indices] - maxes[indices[0]])),
                            name="lse_stable")
        out[indices] = data[indices] - maxes[indices[0]] - lse_stable[indices[0]]
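The same log-sum-exp trick in NumPy (my addition): subtracting the max before exponentiating keeps `exp` from overflowing without changing the result.

import numpy as np

def log_softmax_np(data, axis=0):
    maxes = data.max(axis=axis, keepdims=True)
    lse = np.log(np.exp(data - maxes).sum(axis=axis, keepdims=True))
    return data - maxes - lse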
Example No. 15
    def define_graph(self, data, out, axes=(0, ), keepdims=True):
        # indices = _get_single_node_indices(data)
        indices = tuple([pm.index(0, s - 1) for s in data.shape])
        sum_idx = [indices[i] for i in axes]  # pm.sum expects a flat list of indices
        out_idx = tuple(
            [indices[i] for i in range(len(indices)) if i not in axes])
        out[out_idx] = pm.sum(sum_idx, data[indices])
Example No. 16
def test_flatten_result_length():
    with pm.Node(name="linear_reg") as graph:
        m = pm.placeholder("m", type_modifier="param")
        x = pm.placeholder("x", shape=(m), type_modifier="input")
        y = pm.placeholder("y", type_modifier="input")
        w = pm.placeholder("w", shape=(m), type_modifier="state")
        mu = pm.placeholder("mu", default_val=1.0, type_modifier="param")
        i = pm.index(0, (m - 1).set_name("m-1")).set_name("i")
        h = pm.sum([i], (x[i] * w[i]).set_name("x*w"), name="h")
        d = (h - y).set_name("h-y")
        g = (d * x[i]).set_name("d*x")
        w_ = (w[i] - (mu * g[i]).set_name("mu*g")).set_name("w_out")

    shape_val_pass = NormalizeGraph({"m": 3})
    count_pass = CountNodes()
    flatten_pass = Lower({})

    new_graph = shape_val_pass(graph)

    flattened_g = flatten_pass(new_graph)
    x = np.random.randint(0, 10, 10)
    y = np.random.randint(0, 10, 1)[0]
    w = np.random.randint(0, 10, 10)

    orig_graph = count_pass(flattened_g)
Example No. 17
def test_single_dim_op_slice():
    with pm.Node(name="elem3") as graph:
        m = pm.parameter(name="m")
        x = pm.input("x", shape=m)
        w = pm.state("w", shape=m)
        i = pm.index(0, m - 1, name="i")
        out = (w[i] * x[i])
        w[i] = (out[i] - w[i])

    m_ = 3
    x_ = np.random.randint(0, 10, m_)
    w_ = np.random.randint(0, 10, m_)

    coarse_eval = graph("w", x=x_, w=w_)
    np_result = x_ * w_ - w_
    np.testing.assert_allclose(coarse_eval, np_result)

    shape_pass = NormalizeGraph({"m": 3})
    graph_shapes = shape_pass(graph)
    shape_res = graph_shapes("w", x=x_, w=w_)

    np.testing.assert_allclose(shape_res, np_result)
    lower_pass = Lower({})
    lowered_graph = lower_pass(graph_shapes)
    input_info = {f"w/w({i},)": w_[i] for i in range(len(w_))}
    input_info.update({f"x/x({i},)": x_[i] for i in range(len(x_))})
    fine_grained_eval = lowered_graph("w/w(2,)", input_info)
    assert fine_grained_eval == np_result[2]
Example No. 18
def test_linear_deserialize():

    graph_name = "linear_reg1"
    with pm.Node(name=graph_name) as graph:
        m = pm.placeholder("m")
        x_ = pm.placeholder("x", shape=(m))
        y_ = pm.placeholder("y")
        w_ = pm.placeholder("w", shape=(m))
        mu = pm.parameter(name="mu", default=1.0)
        i = pm.index(0, (m - 1).set_name("m-1"), name="i")
        h = pm.sum([i], (x_[i] * w_[i]).set_name("x*w"), name="h")
        d = (h - y_).set_name("h-y")
        g = (d * x_[i]).set_name("d*x")
        mug = (mu * g[i]).set_name("mu*g[i]")
        w_ = ((w_[i]) - mug).set_name("w_out")
    x = np.random.randint(0, 10, 10)
    y = np.random.randint(0, 10, 1)[0]
    w = np.random.randint(0, 10, 10)

    graph_res = graph("w_out", {"x": x, "y": y, "w": w})
    actual_res = w - ((np.sum(x * w) - y) * x) * 1.0

    np.testing.assert_allclose(graph_res, actual_res)
    cwd = Path(f"{__file__}").parent
    base_path = f"{cwd}/pmlang_examples"
    full_path = f"{base_path}/outputs"
    pb_path = f"{full_path}/{graph_name}.srdfg"
    pm.pb_store(graph, full_path)
    node = pm.pb_load(pb_path)
    new_graph_res = node("w_out", {"x": x, "y": y, "w": w})
    np.testing.assert_allclose(graph_res, new_graph_res)
    np.testing.assert_allclose(actual_res, new_graph_res)
Example No. 19
def test_single_dim_norm():
    with pm.Node(name="elem1") as graph:
        m = pm.parameter("m")
        x = pm.input("x", shape=m)
        w = pm.state("w", shape=m)
        i = pm.index(0, m - 1, name="i")
        w[i] = (w[i] * x[i])
    x_ = np.random.randint(0, 10, 3)
    w_ = np.random.randint(0, 10, 3)
    coarse_eval = graph("w", x=x_, w=w_)

    np_result = x_ * w_
    np.testing.assert_allclose(coarse_eval, np_result)
    shape_pass = NormalizeGraph({"m": 3})
    graph_shapes = shape_pass(graph)

    shape_res = graph_shapes("w", x=x_, w=w_)
    np.testing.assert_allclose(shape_res, np_result)
    lower_pass = Lower({})
    lowered_graph = lower_pass(graph_shapes)
    input_info = {f"w/w({i},)": w_[i] for i in range(len(w_))}
    input_info.update({f"x/x({i},)": x_[i] for i in range(len(x_))})
    fine_grained_eval = lowered_graph("w/w(1,)", input_info)

    assert fine_grained_eval == np_result[1]

    pb_path = f"{OUTPATH}/{graph.name}.srdfg"
    pm.pb_store(lowered_graph, OUTPATH)
    loaded_node = pm.pb_load(pb_path)
    input_info = {f"w/w({i},)": w_[i] for i in range(len(w_))}
    input_info.update({f"x/x({i},)": x_[i] for i in range(len(x_))})
    fine_grained_eval = loaded_node("w/w(1,)", input_info)
    assert fine_grained_eval == np_result[1]
Example No. 20
def test_multidim_sigmoid(m_):

    with pm.Node(name="logistic") as graph:
        m = pm.parameter(name="m")
        n = pm.parameter(name="n")
        x = pm.input("x", shape=(m))
        w = pm.state("w", shape=(m))
        i = pm.index(0, m - 1, name="i")
        o = pm.sigmoid(w[i] * x[i], name="out")
    x_ = np.random.randint(0, 10, m_).astype(np.float64)  # np.float is removed in modern NumPy
    w_ = np.random.randint(0, 10, m_).astype(np.float64)
    shape_dict = {"m": m_}
    input_dict = {"x": x_, "w": w_}
    np_res = sigmoid((x_ * w_))

    coarse_eval = graph("out", input_dict)
    np.testing.assert_allclose(np_res, coarse_eval)
    lowered = set_shape_and_lower(graph, shape_dict)
    keys = [f"out/out({i},)" for i in range(m_)]

    x_ = np.random.randint(0, 10, m_).astype(np.float64)
    w_ = np.random.randint(0, 10, m_).astype(np.float64)
    input_dict = {}
    for i in range(m_):
        input_dict[f"x/x({i},)"] = x_[i]
        input_dict[f"w/w({i},)"] = w_[i]
    np_res = sigmoid((x_ * w_))

    lower_res = np.asarray(lowered(keys, input_dict)).reshape(np_res.shape)
    np.testing.assert_allclose(lower_res, np_res)
Example No. 21
    def define_graph(self, data, out, axis=0):
        out.set_shape(data.shape)
        i = pm.index(0, data.shape[axis] - 1, name="i")
        j = pm.index(0, data.shape[axis] - 1, name="j")
        indices = [
            pm.index(0, s - 1, name=f"{data.name}[{i}]")
            for i, s in enumerate(data.shape)
        ]
        indices_denom = list(indices)  # copy; aliasing would let the next line be clobbered
        indices_denom[axis] = j
        indices[axis] = i
        indices = tuple(indices)
        indices_denom = tuple(indices_denom)
        mval = pm.max([i], data[indices], name="max_test")
        e_x = pm.exp((data[indices] - mval), name="e_x")
        out[indices] = e_x[indices] / pm.sum(
            [indices_denom[axis]], e_x[indices_denom], name="denom")
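And the softmax counterpart in NumPy (my addition), with the same max-shift for numerical stability:

import numpy as np

def softmax_np(data, axis=0):
    e_x = np.exp(data - data.max(axis=axis, keepdims=True))
    return e_x / e_x.sum(axis=axis, keepdims=True)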
Example No. 22
    def define_graph(self, x, w, y, mu, m):
        i = pm.index(0, (m - 1).set_name("m-1"), name="i")
        h = pm.sum([i], (x[i] * w[i]), name="h")
        c = (y * h).set_name("c")
        ny = (0 - y).set_name("ny")
        p = ((c > 1) * ny).set_name("p")
        g = (p * x[i]).set_name("g")
        w[i] = w[i] - mu * g[i]
Example No. 23
def create_svm_wifi(features, locations, lr=0.0001, deltav=1, train_size=7703):
    with pm.Node(name="svm_wifi") as graph:
        learning_rate = pm.parameter("learning_rate", default=lr)
        delta = pm.parameter("delta", default=deltav)
        n_features = pm.parameter("n_features", default=features)
        n_locations = pm.parameter("n_locations", default=locations)
        x_train = pm.input("x_train", shape=(n_features, ))
        y_train = pm.input("y_train", shape=(n_locations, ))
        y_train_inv = pm.input("y_train_inv", shape=(n_locations, ))
        weights = pm.state("weights", shape=(n_features, n_locations))

        i = pm.index(0, n_features - 1, name="i")
        j = pm.index(0, n_locations - 1, name="j")

        scores = pm.sum([i], (weights[i, j] * x_train[i]), name="scores")
        correct_class_score = pm.sum([j], (scores[j] * y_train[j]),
                                     name="correct_class_score")

        h = ((scores[j] - correct_class_score + delta).set_name("h") > 0)

        # margin = (pm.cast(np.float32, h[j]) * y_train_inv[j]).set_name("margin")
        margin = (h[j] * y_train_inv[j]).set_name("margin")
        valid_margin_count = pm.sum([j], margin[j], name="valid_margin_count")
        partial = (y_train[j] * valid_margin_count).set_name("partial")
        updated_margin = (margin[j] - partial[j]).set_name("updated_margin")
        # # #
        dW = (x_train[i] * updated_margin[j]).set_name("dW")
        weights[i, j] = (weights[i, j] -
                         learning_rate * dW[i, j]).set_name("weights_update")

    shape_dict = {"n_features": features, "n_locations": locations}
    input_info, keys, out_info = svm_wifi_datagen(features,
                                                  locations,
                                                  lr,
                                                  deltav,
                                                  lowered=True)

    cwd = Path(f"{__file__}").parent
    full_path = f"{cwd}/outputs"
    tabla_path = f"{full_path}/{graph.name}_{locations}_{features}_tabla.json"

    tabla_ir, tabla_graph = pm.generate_tabla(graph,
                                              shape_dict,
                                              tabla_path,
                                              context_dict=input_info,
                                              add_kwargs=True)
Example No. 24
def _get_reduce_node_indices(a, b, output, axis):
    if output.shape == pm.DEFAULT_SHAPES[0]:
        return tuple([])
    else:
        if not output.shape:
            raise RuntimeError
        indices = tuple([pm.index(0, s - 1) for s in output.shape])
        return indices
Example No. 25
def _get_single_node_indices(node, shape=None):
    if node.shape == pm.DEFAULT_SHAPES[0]:
        return tuple([])
    else:
        if not shape:
            shape = node.shape
        indices = tuple([pm.index(0, s - 1) for s in shape])
        return indices
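A small illustration of these helpers (my addition), assuming `import polymath as pm`: one index per dimension for shaped nodes, and an empty tuple for scalar nodes so both can be indexed uniformly.

with pm.Node(name="idx_example") as g:
    t = pm.input(name="t", shape=(3, 4))
    idx = _get_single_node_indices(t)  # one pm.index per dim: (0..2, 0..3)
    s = pm.input(name="s")             # scalar node
    assert _get_single_node_indices(s) == ()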
Example No. 26
def test_index_op():
    with pm.Node(name="indexop") as graph:
        m = pm.parameter(name="m")
        n = pm.parameter(name="n")
        i = pm.index(0, m - 1, name="i")
        j = pm.index(0, n - 1, name="j")
        i_ = (i + 1).set_name("i_")

        k = (i + j).set_name("k")
    m_ = 5
    n_ = 3
    input_info = {"m": m_, "n": n_}
    res = graph("k", input_info)
    op1 = np.arange(0, m_)
    op2 = np.arange(0, n_)
    value = np.array(list(product(*(op1, op2))))
    value = np.array(list(map(lambda x: x[0] + x[1], value)))
    np.testing.assert_allclose(res, value)
Example No. 27
def test_strided_index(lbound, ubound, stride):

    with pm.Node(name="strided") as graph:
        idx = pm.index(lbound, ubound - 1, stride=stride, name="i")

    ref = np.arange(lbound, ubound, stride)
    res = graph("i", {})

    np.testing.assert_allclose(ref, res)
Example No. 28
def get_gemm(a,
             b,
             c=None,
             shape=None,
             name=None,
             alpha=None,
             beta=None,
             transA=None,
             transB=None,
             out=None):
    if not out:
        out = pm.output(shape=shape, name=name)
    if transB:
        assert len(b.shape) == 2
        b.shape = (b.shape[1], b.shape[0])
        transB = False

    if c:
        pm.gemm(a,
                b,
                c,
                out,
                alpha=alpha,
                beta=beta,
                transA=transA,
                transB=transB,
                strict_shapes=True)
    else:
        t_c = pm.temp(shape=shape)
        i = pm.index(0, shape[0] - 1)
        j = pm.index(0, shape[1] - 1)
        t_c[i, j] = 0
        pm.gemm(a,
                b,
                t_c,
                out,
                alpha=alpha,
                beta=beta,
                transA=transA,
                transB=transB,
                strict_shapes=True)
    return out
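A hypothetical usage sketch (names are mine), assuming `import polymath as pm`; when no `c` is given, the helper zero-fills a temporary accumulator so `pm.gemm` still has a bias operand:

with pm.Node(name="gemm_example") as graph:
    a = pm.input(name="a", shape=(3, 4))
    b = pm.input(name="b", shape=(4, 5))
    y = get_gemm(a, b, shape=(3, 5), name="y", alpha=1.0, beta=0.0)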
Example No. 29
    def define_graph(self, x, y, alpha, beta, bias, nsize):
        n = pm.index(0, x.shape[0] - 1)
        c = pm.index(0, x.shape[1] - 1)
        h = pm.index(0, x.shape[2] - 1)
        w = pm.index(0, x.shape[3] - 1)
        c_ = pm.index(0, x.shape[1] - 1)
        ext = pm.temp(name="extended", shape=tuple([*x.shape, x.shape[-3]]))

        bounds = pm.output(name="bounds", shape=(x.shape[1], x.shape[1]))
        radius = nsize // 2
        hbool = ((((x.shape[1] > (c + radius + 1)) * (c + radius)) +
                  (x.shape[1] <= (c + radius + 1)) * (x.shape[1] - 1)) >= c_)
        lbool = ((((c - radius) > 0) * (c - radius)) +
                 (((c - radius) <= 0) * 0) <= c_)
        bounds[c, c_] = hbool * lbool
        ext[n, c, h, w, c_] = x[n, c_, h, w] * bounds[c, c_]
        # y[n, c, h, w] = x[n,c,h,w] / ((bias + (alpha/nsize) * pm.sum([c_], ext[n, c, h, w, c_]**2))**beta)
        y[n, c, h, w] = x[n, c, h, w] / (
            (bias +
             (alpha / nsize) * pm.sum([c_], ext[n, c, h, w, c_]**2))**beta)
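A NumPy reference for the same across-channel LRN (my addition): for each channel `c`, the denominator sums squares over the `nsize`-wide window clamped to valid channels, which is exactly what the `bounds` mask encodes above.

import numpy as np

def lrn_np(x, alpha, beta, bias, nsize):
    n_, c_, h_, w_ = x.shape
    r = nsize // 2
    out = np.empty_like(x)
    for c in range(c_):
        lo, hi = max(0, c - r), min(c_ - 1, c + r) + 1  # clamped window
        denom = (bias + (alpha / nsize) * (x[:, lo:hi] ** 2).sum(axis=1)) ** beta
        out[:, c] = x[:, c] / denom
    return out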
Example No. 30
def _get_binop_idx(node_a, node_b, out_node):
    # TODO: Figure out what to do about multiple dimensions with the same value
    cnt = 0
    op1 = []
    op2 = []
    all_ops = []

    for i in node_a.shape:
        if i == 1:
            op1.append(0)
            # all_ops.append(0)
        else:
            idx = pm.index(0, i - 1)
            op1.append(idx)
            all_ops.append(idx)
            cnt += 1

    for i in node_b.shape:
        if i in node_a.shape:
            idx = node_a.shape.index(i)
            op2.append(op1[idx])
        elif i == 1:
            op2.append(0)
            # all_ops.append(0)
        else:
            idx = pm.index(0, i - 1)
            op2.append(idx)
            all_ops.append(idx)
            cnt += 1
    if out_node.is_shape_finalized():
        all_ops = []
        for s in out_node.shape:
            if s in node_a.shape:
                idx = node_a.shape.index(s)
                all_ops.append(idx)
            else:
                assert s in node_b.shape, f"Output shape value {s} not in other shapes"
                idx = node_b.shape.index(s)
                all_ops.append(idx)

    return op1, op2, all_ops
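The rules above mirror NumPy broadcasting (my addition as a reminder): size-1 dimensions are pinned to index 0, matching sizes reuse the same index object, and `all_ops` collects the distinct iteration axes of the result.

import numpy as np

# e.g. shapes (3, 1) and (3, 4) iterate over two distinct result axes
assert np.broadcast_shapes((3, 1), (3, 4)) == (3, 4)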