import json

import numpy as np
import polymath as pm

# Helpers such as linear, op_counts, np_lenetv2, and the backend passes
# (TVM_OPS, TVMPass, DNNWEAVER_OPS, DNNWeaverPass, TablaPass) are assumed to
# come from the surrounding test utilities and compiler packages.


def test_load_linear_regressor(m_):
    shape_dict = {"m": m_}
    m = pm.parameter("m")
    mu = pm.parameter(name="mu", default=1.0)
    x = pm.input("x", shape=(m,))
    y = pm.input("y")
    w = pm.state("w", shape=(m,))
    graph = pm.linear_regressor_train(x, w, y, mu, m)
    test_graph, input_info, out_info, keys = linear(m=m_, coarse=True)

    assert len(test_graph.nodes.keys()) == len(graph.nodes.keys())
    assert op_counts(test_graph) == op_counts(graph)

    shape_val_pass = pm.NormalizeGraph(shape_dict)
    new_graph = shape_val_pass(graph)
    test_res = new_graph(keys, input_info)
    np.testing.assert_allclose(test_res, out_info["w"])

    test_graph_lowered, input_info, new_out_info, keys = linear(m=m_)
    flatten_pass = pm.Lower({})
    test_flatten_pass = pm.Lower({})
    flattened_g = flatten_pass(new_graph)
    ref_lowered = test_flatten_pass(test_graph_lowered, {})

    assert len(ref_lowered.nodes.keys()) == len(flattened_g.nodes.keys())
    assert op_counts(ref_lowered) == op_counts(flattened_g)

    all_vals = flattened_g(keys, input_info)
    np.testing.assert_allclose(new_out_info["w"], all_vals)
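# The tests in this file take a size argument (m_, etc.); under pytest they
# would presumably be driven by a parametrize decorator, e.g. (hypothetical
# sizes, not taken from the source):
#
#   import pytest
#
#   @pytest.mark.parametrize("m_", [3, 55])
#   def test_load_linear_regressor(m_): ...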
def reco(m_=3, n_=3, k_=2):
    with pm.Node(name="recommender") as graph:
        mu = pm.placeholder("mu")
        m = pm.placeholder("m")
        n = pm.placeholder("n")
        k = pm.placeholder("k")
        x1 = pm.placeholder("x1", shape=k)
        x2 = pm.placeholder("x2", shape=k)
        r1 = pm.placeholder("r1", shape=m)
        y1 = pm.placeholder("y1", shape=m)
        r2 = pm.placeholder("r2", shape=n)
        y2 = pm.placeholder("y2", shape=n)
        w1 = pm.placeholder("w1", shape=(m, k))
        w2 = pm.placeholder("w2", shape=(n, k))
        i = pm.index(0, m - 1, name="i")
        j = pm.index(0, n - 1, name="j")
        l = pm.index(0, k - 1, name="l")
        h1_sum = pm.sum([l], (w1[i, l] * x2[l]).set_name("w1*x2")).set_name("h1_sum")
        h1 = (h1_sum[i] * r1[i]).set_name("h1")
        h2_sum = pm.sum([l], (x1[l] * w2[j, l]).set_name("x1*w2")).set_name("h2_sum")
        h2 = (h2_sum[j] * r2[j]).set_name("h2")
        # Errors and weight updates.
        d1 = (h1[i] - y1[i]).set_name("d1")
        d2 = (h2[j] - y2[j]).set_name("d2")
        g1 = (d1[i] * x2[l]).set_name("g1")
        g2 = (d2[j] * x1[l]).set_name("g2")
        w1_ = (w1[i, l] - g1[i, l]).set_name("w1_")
        w2_ = (w2[j, l] - g2[j, l]).set_name("w2_")
    shape_val_pass = pm.NormalizeGraph({"m": m_, "n": n_, "k": k_})
    new_graph = shape_val_pass(graph)
    return new_graph
def generate_tvm(graph, input_dict, filepath, context_dict=None):
    assert len(input_dict) > 0
    shape_dict = {k: v.shape if isinstance(v, np.ndarray) else v
                  for k, v in input_dict.items()}
    shape_dict['populate'] = False
    shape_pass = pm.NormalizeGraph(shape_dict)
    lower_pass = pm.Lower(TVM_OPS)
    tvm_pass = TVMPass()
    shaped = shape_pass(graph)
    lowered = lower_pass(shaped)
    result = tvm_pass(lowered)
    return tvm_pass.tvm_ir['tvm_code']
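# A minimal usage sketch for generate_tvm (hypothetical size and output path;
# assumes the linear() helper above returns (graph, input_info, out_info, keys)
# with numpy inputs keyed by node name):
#
#   graph, input_info, _, _ = linear(m=4, coarse=True)
#   tvm_code = generate_tvm(graph, input_info, "linear_tvm.py")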
def test_linear(m_):
    shape_dict = {"m": m_}
    graph, input_info, out_info, keys = linear(**shape_dict, coarse=True)
    shape_val_pass = pm.NormalizeGraph(shape_dict)
    new_graph = shape_val_pass(graph)
    test_res = new_graph(keys, input_info)
    np.testing.assert_allclose(test_res, out_info["w"])

    graph, input_info, new_out_info, keys = linear(**shape_dict)
    flatten_pass = pm.Lower({})
    flattened_g = flatten_pass(new_graph)
    all_vals = flattened_g(keys, input_info)
    np.testing.assert_allclose(new_out_info["w"], all_vals)
def generate_dnnweaver(graph, input_dict, filepath, debug=False, add_kwargs=False,
                       context_dict=None):
    shape_dict = {k: v.shape if isinstance(v, np.ndarray) else v
                  for k, v in input_dict.items()}
    shape_dict['populate'] = False
    shape_pass = pm.NormalizeGraph(shape_dict, debug=debug)
    lower_pass = pm.Lower(DNNWEAVER_OPS, debug=debug)
    dnnw_pass = DNNWeaverPass(debug=debug)
    shaped = shape_pass(graph)
    lowered = lower_pass(shaped)
    result = dnnw_pass(lowered)
    return dnnw_pass.dnnw_ir['dnnweaver_code'], result
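# Usage would mirror generate_tvm: the same normalize/lower pipeline, but
# lowering to DNNWEAVER_OPS and returning both the generated code and the pass
# result. A sketch under the same assumptions as above:
#
#   dnnw_code, res = generate_dnnweaver(graph, input_info, "linear_dnnw.py")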
def test_reco():
    m_ = 3
    n_ = 3
    k_ = 2
    graph, input_info, out_info, keys = reco(m=m_, n=n_, k=k_, coarse=True)
    shape_val_pass = pm.NormalizeGraph({"m": m_, "n": n_, "k": k_})
    new_graph = shape_val_pass(graph)
    test_res = new_graph(keys, input_info)
    np.testing.assert_allclose(test_res[0], out_info["w1"])
    np.testing.assert_allclose(test_res[1], out_info["w2"])

    graph, input_info, new_out_info, keys = reco(m=m_, n=n_, k=k_)
    flatten_pass = pm.Lower({})
    flattened_g = flatten_pass(new_graph)
    all_vals = flattened_g(keys, input_info)
    # The first m_*k_ flattened values belong to w1, the rest to w2.
    out1 = np.asarray(list(all_vals[0:6])).reshape(new_out_info["w1"].shape)
    out2 = np.asarray(list(all_vals[6:])).reshape(new_out_info["w2"].shape)
    np.testing.assert_allclose(new_out_info["w1"], out1)
    np.testing.assert_allclose(new_out_info["w2"], out2)
def test_load_nested_linear_regressor(m_):
    shape_dict = {"m": m_}
    with pm.Node(name="nested_linear") as graph:
        m = pm.parameter(name="m")
        mu = pm.parameter(name="mu", default=1.0)
        x = pm.input("x", shape=(m,))
        y = pm.input("y")
        w = pm.state("w", shape=(m,))
        pm.linear_regressor_train(x, w, y, mu, m, name="linear_regressor")
        j = pm.index(0, m - 1, name="j")
        tw = (w[j] - 4).set_name("tw")

    test_graph, input_info, out_info, keys = linear(m=m_, coarse=True)
    shape_val_pass = pm.NormalizeGraph(shape_dict)
    new_graph = shape_val_pass(graph)
    test_res = new_graph("tw", input_info)
    np.testing.assert_allclose(test_res, (out_info["w"] - 4))

    ref_graph, input_info, new_out_info, keys = linear(m=m_)
    flatten_pass = pm.Lower({})
    keys = [f"tw/tw({i},)" for i in range(m_)]
    flattened_g = flatten_pass(new_graph)
    all_vals = flattened_g(keys, input_info)
def generate_tabla(graph, input_dict, filepath, context_dict=None, add_kwargs=False,
                   debug=True):
    assert len(input_dict) > 0
    shape_pass = pm.NormalizeGraph(input_dict, debug=debug)
    context_dict = context_dict or {}
    lower_pass = pm.Lower({}, debug=debug)
    print("Starting graph normalization...")
    shaped = shape_pass(graph)
    print("Finished graph normalization. Executing lower pass.")
    lowered = lower_pass(shaped)
    print("Finished graph lowering, generating TABLA dfg.")
    # Drop context entries that no longer correspond to nodes in the lowered graph.
    for k in list(context_dict.keys()):
        if k not in lowered.nodes:
            context_dict.pop(k)
    tabla_pass = TablaPass(context_dict, add_kwargs=add_kwargs, debug=debug)
    res = tabla_pass(lowered)
    print(f"Finished generating TABLA dfg, now storing to JSON file at {filepath}.")
    tabla_nodes = [node for _, node in tabla_pass.dfg.items()]
    with open(filepath, "w") as f:
        json.dump(tabla_nodes, f, indent=4)
    return tabla_nodes, res
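# A sketch of driving generate_tabla (hypothetical path; note that here
# input_dict is passed straight to NormalizeGraph, so a plain shape dict
# suffices, and that unlike the TVM/DNNWeaver generators this one also
# serializes the dataflow graph to JSON):
#
#   tabla_nodes, res = generate_tabla(graph, {"m": 4}, "linear_tabla.json")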
def lenet(lenet_type="lenet5", coarse=True, debug=False):
    with pm.Node(name="lenet") as graph:
        n = pm.parameter(name="n")
        c = pm.parameter(name="ic")
        ih = pm.parameter(name="ih")
        iw = pm.parameter(name="iw")
        nf1 = pm.parameter(name="nf1")
        kh1 = pm.parameter(name="kh1")
        kw1 = pm.parameter(name="kw1")
        data = pm.input(name="data", shape=(n, c, ih, iw))
        w1 = pm.state(name="w1", shape=(nf1, c, kh1, kw1))
        b1 = pm.state(name="b1", shape=(nf1,))
        s1 = pm.parameter(name="s1")
        p1 = pm.parameter(name="p1")
        c1 = pm.output(name="c1", shape=(n, nf1, 28, 28))
        a1 = pm.output(name="a1", shape=(n, nf1, 28, 28))
        l1 = pm.output(name="l1", shape=(n, nf1, 14, 14))
        pm.conv_bias(data, w1, b1, c1, s1, p1)
        pm.elem_tanh(c1, a1, shape=a1.shape)
        pm.avg_pool2d(a1, l1, 2, 2, 2, 0)

        nf2 = pm.parameter(name="nf2")
        kh2 = pm.parameter(name="kh2")
        kw2 = pm.parameter(name="kw2")
        w2 = pm.state(name="w2", shape=(nf2, nf1, kh2, kw2))
        b2 = pm.state(name="b2", shape=(nf2,))
        s2 = pm.parameter(name="s2")
        p2 = pm.parameter(name="p2")
        c2 = pm.output(name="c2", shape=(n, nf2, 10, 10))
        a2 = pm.output(name="a2", shape=(n, nf2, 10, 10))
        l2 = pm.output(name="l2", shape=(n, nf2, 5, 5))
        pm.conv_bias(l1, w2, b2, c2, s2, p2)
        pm.elem_tanh(c2, a2, shape=a2.shape)
        pm.avg_pool2d(a2, l2, 2, 2, 2, 0)

        nf3 = pm.parameter(name="nf3")
        kh3 = pm.parameter(name="kh3")
        kw3 = pm.parameter(name="kw3")
        w3 = pm.state(name="w3", shape=(nf3, nf2, kh3, kw3))
        b3 = pm.state(name="b3", shape=(nf3,))
        s3 = pm.parameter(name="s3")
        p3 = pm.parameter(name="p3")
        c3 = pm.output(name="c3", shape=(n, nf3, 1, 1))
        a3 = pm.output(name="a3", shape=(n, nf3, 1, 1))
        pm.conv_bias(l2, w3, b3, c3, s3, p3)
        pm.elem_tanh(c3, a3, shape=a3.shape)

        f4 = pm.output(name="f4", shape=(n, nf3))
        pm.coarse_flatten(a3, f4, axis=1, shape=f4.shape)

        m5 = pm.parameter(name="m5")
        n5 = pm.parameter(name="n5")
        f5 = pm.output(name="f5", shape=(n, m5))
        w5 = pm.state(name="w5", shape=(m5, n5))
        # w5 = pm.state(name="w5", shape=(n5, m5))
        a6 = pm.output(name="a5", shape=(n, m5))
        b5 = pm.state(name="b5", shape=(n5,))
        pm.gemm(f4, w5, b5, f5, shape=f5.shape, alpha=1.0, beta=0.0,
                transA=False, transB=True)
        pm.elem_tanh(f5, a6, shape=a6.shape)

        m7 = pm.parameter(name="m7")
        n7 = pm.parameter(name="n7")
        f7 = pm.output(name="f7", shape=(n, n7))
        w7 = pm.state(name="w7", shape=(m7, n7))
        # w7 = pm.state(name="w7", shape=(n7, m7))
        b7 = pm.state(name="b7", shape=(n7,))
        pm.gemm(a6, w7, b7, f7, shape=f7.shape, alpha=1.0, beta=0.0,
                transA=False, transB=False)

        out = pm.output(name="sm")
        pm.softmax(f7, out, axis=1)

    if coarse:
        in_info, keys, out_info = np_lenetv2()
        return graph, in_info, out_info, keys
    else:
        shape_dict = {"n": 1, "ic": 1, "ih": 32, "iw": 32,
                      "nf1": 6, "kh1": 5, "kw1": 5, "s1": 1, "p1": 0,
                      "nf2": 16, "kh2": 5, "kw2": 5, "s2": 1, "p2": 0,
                      "nf3": 120, "kh3": 5, "kw3": 5, "s3": 1, "p3": 0,
                      "m5": 120, "n5": 84, "m7": 84, "n7": 10}
        shape_val_pass = pm.NormalizeGraph(shape_dict, debug=debug)
        new_graph = shape_val_pass(graph)
        in_info, keys, out_info = np_lenetv2(lowered=True)
        return new_graph, in_info, out_info, keys
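# A sketch of exercising both lenet() variants (relies on np_lenetv2 above for
# reference data; variable names here are illustrative only):
#
#   graph, in_info, out_info, keys = lenet(coarse=True)    # symbolic-shape graph
#   shaped, in_info, out_info, keys = lenet(coarse=False)  # shape-normalized graph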