def main() -> None:
    """Set up storage of the kind named on the command line and print its device.

    Exits with status 1 (and a usage/error message on stderr) when the
    argument is missing or not a recognized storage kind.
    """
    if len(sys.argv) < 2:
        # BUG FIX: the usage line previously omitted `scone` even though the
        # kinds table below accepts it.
        print(f"USAGE: {sys.argv[0]} (native|spdk|lkl|scone)", file=sys.stderr)
        sys.exit(1)
    kinds = dict(
        native=StorageKind.NATIVE,
        spdk=StorageKind.SPDK,
        lkl=StorageKind.LKL,
        scone=StorageKind.SCONE,
    )
    kind = kinds.get(sys.argv[1])
    if kind is None:
        # Derive the option list from `kinds` so this message can never drift
        # out of sync with the supported set again.
        valid = ", ".join(kinds)
        print(
            f"Unsupported option '{sys.argv[1]}', valid options are {valid}",
            file=sys.stderr,
        )
        sys.exit(1)
    settings = create_settings()
    mount = Storage(settings).setup(kind)
    mount.mount()
    print(mount.dev)
def main() -> None:
    """Drive the redis benchmarks until each variant has five recorded runs,
    then dump the accumulated stats to redis-latest.tsv."""
    stats = read_stats("redis.json")
    settings = create_settings()
    storage = Storage(settings)
    # 100k records / 10k operations per benchmark run.
    bench = Benchmark(settings, storage, 100000, 10000)
    variants = [
        ("normal", benchmark_redis_normal),
        ("trace", benchmark_redis_trace),
    ]
    for label, run in variants:
        # `stats.runs` tracks completed runs per variant, so a restarted
        # script resumes where it left off.
        while stats.runs[label] < 5:
            run(bench, stats.data)
            stats.checkpoint(label)
    stats.to_tsv("redis-latest.tsv")
def main() -> None:
    """Benchmark sgx-io at several core counts, checkpointing after each one,
    and write the results as timestamped and `latest` TSV files."""
    stats = read_stats("smp.json")
    settings = create_settings()
    storage = Storage(settings)
    finished = set(stats["cores"])
    for n_cores in (1, 2, 4, 6, 8):
        # Core counts already present in the stats were measured earlier.
        if n_cores in finished:
            print(f"skip {n_cores} cores")
            continue
        benchmark_sgx_io(storage, stats, n_cores)
        # Persist after every run so progress survives interruption.
        write_stats("smp.json", stats)
    csv = f"smp-{NOW}.tsv"
    print(csv)
    df = pd.DataFrame(stats)
    df.to_csv(csv, index=False, sep="\t")
    df.to_csv("smp-latest.tsv", index=False, sep="\t")
def main() -> None:
    """Configure the network according to the kind named on the command line.

    Exits with status 1 (and a message on stderr) for a missing or
    unrecognized argument.
    """
    if len(sys.argv) < 2:
        print(f"USAGE: {sys.argv[0]} (native|dpdk|tap|client-native)", file=sys.stderr)
        sys.exit(1)
    choice = sys.argv[1]
    kinds = {
        "native": NetworkKind.NATIVE,
        "client-native": NetworkKind.CLIENT_NATIVE,
        "dpdk": NetworkKind.DPDK,
        "tap": NetworkKind.TAP,
    }
    try:
        kind = kinds[choice]
    except KeyError:
        print(
            f"Unsupported option '{choice}', valid options are native, client-native, dpdk or tap",
            file=sys.stderr,
        )
        sys.exit(1)
    settings = create_settings()
    Network(settings).setup(kind)
def main() -> None:
    """Drive the mysql benchmarks until each variant has five recorded runs,
    then dump the stats to mysql[-perf]-latest.tsv."""
    enable_perf = os.environ.get("ENABLE_PERF") is not None
    suffix = "-perf" if enable_perf else ""
    stats = read_stats(f"mysql{suffix}.json")
    settings = create_settings()
    storage = Storage(settings)
    bench = Benchmark(settings, storage)
    # NOTE(review): ENABLE_PERF only selects the stats-file suffix; the
    # "perf" variant below runs regardless of the env var — confirm intended.
    variants = [
        ("normal", benchmark_normal),
        ("trace", benchmark_trace),
        ("perf", benchmark_perf),
    ]
    for label, run in variants:
        # Resume-friendly: skip variants that already have five runs.
        while stats.runs[label] < 5:
            run(bench, stats.data)
            stats.checkpoint(label)
    stats.to_tsv(f"mysql{suffix}-latest.tsv")
def main() -> None:
    """Run the hdparm benchmark for every system not yet present in the
    stats, persisting after each, then write timestamped and `latest` TSVs."""
    stats = read_stats("hdparm.json")
    settings = create_settings()
    storage = Storage(settings)
    runners = {
        "native": benchmark_hdparm_native,
        "sgx-lkl": benchmark_hdparm_sgx_lkl,
        "sgx-io": benchmark_hdparm_sgx_io,
    }
    done = set(stats["system"])
    for label, run in runners.items():
        if label in done:
            print(f"skip {label} benchmark")
            continue
        run(storage, stats)
        # Persist after every benchmark so progress survives interruption.
        write_stats("hdparm.json", stats)
    csv = f"hdparm-test-{NOW}.tsv"
    print(csv)
    df = pd.DataFrame(stats)
    df.to_csv(csv, index=False, sep="\t")
    df.to_csv("hdparm-test-latest.tsv", index=False, sep="\t")
def main() -> None:
    """Run the network block-size benchmarks for systems not yet in the
    stats, then write timestamped and `latest` TSV result files."""
    stats = read_stats("network-test-bs.json")
    settings = create_settings()
    setup_remote_network(settings)
    benchmark = Benchmark(settings)
    benchmarks = {
        # "sgx-lkl": benchmark_nw_test_sgx_lkl,
        "sgx-io": benchmark_nw_test_sgx_io,
    }
    system = set(stats["system"])
    for name, bench_func in benchmarks.items():
        if name in system:
            print(f"skip {name} benchmark")
            continue
        bench_func(benchmark, stats)
        # Persist after each benchmark so progress survives interruption.
        write_stats("network-test-bs.json", stats)
    csv = f"network-test-bs-{NOW}.tsv"
    # Announce the output filename like the sibling benchmark drivers do
    # (previously missing here).
    print(csv)
    throughput_df = pd.DataFrame(stats)
    throughput_df.to_csv(csv, index=False, sep="\t")
    throughput_df.to_csv("network-test-bs-latest.tsv", index=False, sep="\t")
def main() -> None:
    """Run the iperf benchmark for every system not yet present in the
    stats, persisting after each, then write the results as TSV."""
    stats = read_stats("iperf.json")
    settings = create_settings()
    setup_remote_network(settings)
    benchmark = Benchmark(settings)
    benchmarks = {
        "native": benchmark_native,
        "sgx-io": benchmark_sgx_io,
        "sgx-lkl": benchmark_sgx_lkl,
        "scone": benchmark_scone,
    }
    system = set(stats["system"])
    for name, benchmark_func in benchmarks.items():
        if name in system:
            print(f"skip {name} benchmark")
            continue
        benchmark_func(benchmark, stats)
        # Persist after each benchmark so progress survives interruption.
        write_stats("iperf.json", stats)
    # Plain literal: was an f-string with no placeholders.
    csv = "iperf-latest.tsv"
    print(csv)
    pd.DataFrame(stats).to_csv(csv, index=False, sep="\t")
def __init__(self) -> None:
    """Create fresh settings and a Network bound to them."""
    settings = create_settings()
    self.settings = settings
    self.network = Network(settings)
def __init__(self, settings: Settings, storage: Storage) -> None:
    """Bind settings and storage, and build the remote netcat/wrk commands.

    Uses the caller-supplied `settings` throughout. Previously
    `self.settings` was a fresh `create_settings()` object while the remote
    commands below used the passed-in `settings`, leaving the instance
    holding two divergent Settings objects.
    """
    self.settings = settings
    self.storage = storage
    self.remote_nc = settings.remote_command(nix_build("netcat"))
    self.remote_wrk = settings.remote_command(nix_build("wrk"))
def __init__(self, settings: Settings) -> None:
    """Bind settings, storage, network and the local netcat binary.

    Uses the caller-supplied `settings` for `self.settings` (previously a
    fresh `create_settings()` object, diverging from the `settings` already
    used for Storage and Network below).
    """
    self.settings = settings
    self.storage = Storage(settings)
    self.network = Network(settings)
    self.local_nc = nix_build("netcat-native")
def __init__(self, settings: Settings) -> None:
    """Bind settings, storage, network and the remote netcat/wrk commands.

    Uses the caller-supplied `settings` for `self.settings` (previously a
    fresh `create_settings()` object, diverging from the `settings` already
    used for everything else in this constructor).
    """
    self.settings = settings
    self.storage = Storage(settings)
    self.network = Network(settings)
    self.remote_nc = settings.remote_command(nix_build("netcat-native"))
    self.remote_wrk = settings.remote_command(nix_build("wrk-bench"))
def main() -> None:
    """Run the nginx and fstest smoke tests."""
    test_nginx(create_settings())
    test_fstest()