def test_papi(target, dev):
    """Collect a PAPI hardware metric while profiling an MLP run on the VM.

    Skips when the target kind has no metric mapping, then checks the
    collected metric shows up in the report with at least one positive value.
    """
    target = tvm.target.Target(target)
    # Metric to collect, keyed by target kind.
    metric_by_kind = {
        "llvm": "PAPI_FP_OPS",
        "cuda": "cuda:::event:shared_load:device=0",
    }
    kind = str(target.kind)
    if kind not in metric_by_kind:
        pytest.skip(f"Target {target.kind} not supported by this test")
    metric = metric_by_kind[kind]

    mod, params = mlp.get_workload(1)
    exe = relay.vm.compile(mod, target, params=params)
    vm = profiler_vm.VirtualMachineProfiler(exe, dev)

    data = tvm.nd.array(
        np.random.rand(1, 1, 28, 28).astype("float32"), device=dev
    )
    report = vm.profile(
        [data],
        func_name="main",
        collectors=[tvm.runtime.profiling.PAPIMetricCollector({dev: [metric]})],
    )
    assert metric in str(report)

    csv = read_csv(report)
    assert metric in csv.keys()
    # At least one sample of the metric must be non-zero.
    assert any(float(x) > 0 for x in csv[metric])
def test_vm(target, dev):
    """Profile a dynamic-shape elementwise add on the VM profiler.

    Verifies the report mentions the fused kernel and the allocation ops,
    and that reported durations are sane (kernels strictly positive,
    allocation ops allowed to be zero when cached).
    """
    dtype = "float32"
    lhs = relay.var("x", shape=(relay.Any(), relay.Any()), dtype=dtype)
    rhs = relay.var("y", shape=(relay.Any(), relay.Any()), dtype=dtype)
    mod = tvm.IRModule()
    mod["main"] = relay.Function([lhs, rhs], relay.add(lhs, rhs))
    exe = relay.vm.compile(mod, target)
    vm = profiler_vm.VirtualMachineProfiler(exe, dev)

    data = np.random.rand(28, 28).astype("float32")
    report = vm.profile(data, data, func_name="main")
    report_text = str(report)
    for expected in ("fused_add", "Total", "AllocTensorReg", "AllocStorage"):
        assert expected in report_text

    csv = read_csv(report)
    assert "Hash" in csv.keys()
    for dur, name in zip(csv["Duration (us)"], csv["Name"]):
        if name[:5] == "fused":
            # Ops should have a duration greater than zero.
            assert float(dur) > 0
        else:
            # AllocTensor or AllocStorage may be cached, so their duration could be 0.
            assert float(dur) >= 0
def updateFollowing(csv):
    """Merge newly followed accounts into the stored CSV.

    Reads the CSV at *csv*, adds every friend not yet present with a count
    of 0, writes the merged mapping back, and prints how many were added.
    """
    existing = readCSV(csv)
    merged = existing.copy()
    added = 0
    for friend in api.friends():
        if friend.screen_name not in existing:
            merged[friend.screen_name] = 0
            added += 1
    writeCSV(merged)
    print(f'{added} following updated')
def test_vm(target, dev):
    """Run MLP inference under the profiler VM and sanity-check the report."""
    mod, params = mlp.get_workload(1)
    exe = relay.vm.compile(mod, target, params=params)
    vm = profiler_vm.VirtualMachineProfiler(exe, dev)

    data = np.random.rand(1, 1, 28, 28).astype("float32")
    report = vm.profile(data, func_name="main")
    report_text = str(report)
    assert "fused_nn_softmax" in report_text
    assert "Total" in report_text

    csv = read_csv(report)
    assert "Hash" in csv.keys()
    # NOTE(review): this requires EVERY row's duration to be > 0; if cached
    # alloc ops can report 0 us (as the dynamic-shape test assumes), this
    # assertion may be flaky — confirm against the profiler's behavior.
    assert all(float(x) > 0 for x in csv["Duration (us)"])
def test_vm(target, dev):
    """Profile a dynamic-shape elementwise add on the VM profiler.

    Checks the report contains the fused kernel and allocation ops, and
    that durations are sane. Fused kernels must take measurable time;
    AllocTensorReg/AllocStorage may be served from a cache and legitimately
    report a duration of 0, so they are only required to be non-negative
    (asserting > 0 for every row made this test flaky).
    """
    dtype = "float32"
    x = relay.var("x", shape=(relay.Any(), relay.Any()), dtype=dtype)
    y = relay.var("y", shape=(relay.Any(), relay.Any()), dtype=dtype)
    mod = tvm.IRModule()
    mod["main"] = relay.Function([x, y], relay.add(x, y))
    exe = relay.vm.compile(mod, target)
    vm = profiler_vm.VirtualMachineProfiler(exe, dev)

    data = np.random.rand(28, 28).astype("float32")
    report = vm.profile(data, data, func_name="main")
    assert "fused_add" in str(report)
    assert "Total" in str(report)
    assert "AllocTensorReg" in str(report)
    assert "AllocStorage" in str(report)

    csv = read_csv(report)
    assert "Hash" in csv.keys()
    # Ops should have a duration greater than zero.
    assert all(
        [
            float(dur) > 0
            for dur, name in zip(csv["Duration (us)"], csv["Name"])
            if name[:5] == "fused"
        ]
    )
    # AllocTensor or AllocStorage may be cached, so their duration could be 0.
    assert all(
        [
            float(dur) >= 0
            for dur, name in zip(csv["Duration (us)"], csv["Name"])
            if name[:5] != "fused"
        ]
    )
def build_csv(d_list):
    """Build a column-oriented CSV mapping from a list of dictionaries.

    Each input dict is first flattened in place via ``parse_nested_objects``.
    The result maps every key seen in any dict (the header columns, in
    first-seen order) to a list of per-dict values; ``None`` fills the slots
    where a dict lacks that key, so all columns have equal length.
    """
    # Flatten nested structures so every dict is one level deep.
    for d in d_list:
        parse_nested_objects(d)

    # Collect the union of keys — the header columns of the csv.
    csv = {}
    for d in d_list:
        for k in d:
            csv.setdefault(k, [])

    # Fill each column, padding missing keys with None to keep rows aligned.
    for d in d_list:
        for k, column in csv.items():
            column.append(d.get(k))
    return csv
def writeCsv(self, csv, filename):
    """Write the mapping to *filename*: one "<key> <first value>" line per entry.

    Only the first element of each key's value list is written; values are
    rendered with ``%d`` (non-int numbers are truncated toward zero).
    """
    rows = ["%s %d\n" % (key, entries[0]) for key, entries in csv.items()]
    with open(filename, "w") as out:
        out.writelines(rows)