def test_linear_delete():
    """Insert a contiguous key range, then delete every record and verify
    the namespace is empty."""
    # populate keys [0, 100) first
    lib.run_benchmark("--workload I --start-key 0 --keys 100")
    lib.check_for_range(0, 100)
    # delete-bins workload over the same range removes everything
    lib.run_benchmark("--workload DB --start-key 0 --keys 100")
    # an empty expected range asserts no records remain
    lib.check_for_range(0, 0)
def test_random_udf():
    """Run a random-key UDF workload and verify the UDF touched every record."""
    lib.upload_udf("test_module.lua", udf_module)
    # seed keys [0, 20) with a single bin "testbin" holding the value 1
    lib.run_benchmark("--workload I --start-key 0 --keys 20 -o 1")
    # one second of random RUF traffic should hit all 20 keys, so the UDF
    # bumps every "testbin" from 1 to 2
    lib.run_benchmark("--duration 1 --workload RUF,0,0 -upn test_module "
                      "-ufn increment_bin_to_2 -ufv \\\"testbin\\\" --start-key 0 --keys 20 --random",
                      do_reset=False)
    lib.check_for_range(
        0, 20,
        lambda meta, key, bins: lib.obj_spec_is_const_I(bins["testbin"], 2))
def test_write_bins_simple_async():
    """Async insert restricted to --write-bins 1,3: only those two bins
    of the four-bin object spec should exist on each record."""
    def verify(bins):
        # exactly the two selected bins, with their spec'd values
        assert (len(bins) == 2)
        lib.obj_spec_is_I1(bins["testbin"])
        lib.obj_spec_is_I3(bins["testbin_3"])
    lib.run_benchmark("--workload I --start-key 0 --keys 100 -o I1,I2,I3,I4 "
                      "--random --write-bins 1,3 --async")
    lib.check_for_range(0, 100, lambda meta, key, bins: verify(bins))
def test_linear_delete_subset_async():
    """Async-delete the middle slice of a key range and verify only the
    records outside that slice survive."""
    # populate keys [0, 1000)
    lib.run_benchmark("--workload I --start-key 0 --keys 1000")
    lib.check_for_range(0, 1000)
    # asynchronously delete the middle slice [300, 800)
    lib.run_benchmark("--workload DB --start-key 300 --keys 500 --async",
                      do_reset=False)
    # both untouched edges must still be present ...
    lib.check_recs_exist_in_range(0, 300)
    lib.check_recs_exist_in_range(800, 1000)
    # ... and nothing else: 300 + 200 = 500 records total
    assert (len(lib.scan_records()) == 500)
def test_const_map_async():
    """Async UDF workload with a constant-map argument: every record ends
    up with both "bin_1" and "bin_2" written."""
    def verify(meta, key, bins):
        assert ("bin_1" in bins)
        assert ("bin_2" in bins)
        lib.obj_spec_is_I1(bins["bin_1"])
        # bin_2 is offset by 256 relative to the I1 spec
        lib.obj_spec_is_I1(bins["bin_2"] - 256)
    lib.reset()
    lib.upload_udf("test_module.lua", udf_module)
    # one second of random async RUF traffic should reach all 100 records,
    # writing both "bin_1" and "bin_2"
    lib.run_benchmark("--duration 1 --workload RUF,0,0 -upn test_module "
                      "-ufn read_object -ufv \"{\\\"write_bin_1\\\":b,\\\"random_value\\\":I1}\" "
                      "--start-key 0 --keys 100 --random --async",
                      do_reset=False)
    lib.check_for_range(0, 100, verify)
def test_write_bins_random():
    """Random RU workload with --write-bins 2,5: only bins 2 and 5 of the
    five-bin object spec are ever written."""
    def verify(bins):
        assert (len(bins) == 2)
        # bin 2 is the compound spec [D, S10]
        assert (type(bins["testbin_2"]) is list)
        lib.obj_spec_is_D(bins["testbin_2"][0])
        lib.obj_spec_is_S(bins["testbin_2"][1], 10)
        # bin 5 is a 10-byte blob
        lib.obj_spec_is_B(bins["testbin_5"], 10)
    lib.run_benchmark("--workload RU --duration 1 --start-key 0 --keys 200 "
                      "-o I5,[D,S10],I3,D,B10 --random --write-bins 2,5")
    # it's very, very likely that after 1 second, all 200 key values would
    # have been randomly chosen
    lib.check_for_range(0, 200, lambda meta, key, bins: verify(bins))
def test_const_fixed_map():
    """With a single fixed value (-z 1, no --random), every record must
    receive the SAME "bin_1" value from the UDF."""
    first_value = -1

    def verify(meta, key, bins):
        nonlocal first_value
        assert ("bin_1" in bins)
        lib.obj_spec_is_I1(bins["bin_1"])
        if first_value != -1:
            # every subsequent record must match the first one seen
            assert (bins["bin_1"] == first_value)
        else:
            first_value = bins["bin_1"]

    lib.reset()
    lib.upload_udf("test_module.lua", udf_module)
    # the UDF should eventually reach all 100 records, writing both "bin_1" and "bin_2"
    lib.run_benchmark("--duration 1 --workload RUF,0,0 -upn test_module "
                      "-ufn read_object -ufv \"{\\\"write_bin_1\\\":true,\\\"random_value\\\":I1}\" "
                      "--start-key 0 --keys 100 -z 1",
                      do_reset=False)
    lib.check_for_range(0, 100, verify)
def test_write_bins_delete():
    """Delete-bins workload with --write-bins 1,3 removes only the selected
    bins, leaving bins 2 and 4 intact."""
    def verify_full(bins):
        # all four bins present after the initial insert
        assert (len(bins) == 4)
        lib.obj_spec_is_I1(bins["testbin"])
        lib.obj_spec_is_I2(bins["testbin_2"])
        lib.obj_spec_is_I3(bins["testbin_3"])
        lib.obj_spec_is_I4(bins["testbin_4"])

    def verify_remaining(bins):
        # bins 1 and 3 deleted; 2 and 4 untouched
        assert (len(bins) == 2)
        lib.obj_spec_is_I2(bins["testbin_2"])
        lib.obj_spec_is_I4(bins["testbin_4"])

    lib.run_benchmark(
        "--workload I --start-key 0 --keys 100 -o I1,I2,I3,I4 --random")
    lib.check_for_range(0, 100, lambda meta, key, bins: verify_full(bins))
    lib.run_benchmark(
        "--workload DB --start-key 0 --keys 100 -o I1,I2,I3,I4 "
        "--write-bins 1,3",
        do_reset=False)
    lib.check_for_range(0, 100, lambda meta, key, bins: verify_remaining(bins))
def test_set_bin_fixed_bool():
    """With a single fixed value (-z 1), the set_bin UDF writes the same
    boolean to every record: all true or all false, never a mix."""
    counts = {True: 0, False: 0}

    def verify(meta, key, bins):
        assert ("bool_bin" in bins)
        lib.obj_spec_is_b(bins["bool_bin"])
        counts[bool(bins["bool_bin"])] += 1

    lib.upload_udf("test_module.lua", udf_module)
    # seed keys [0, 20) with a single bin "testbin" holding the value 1
    lib.run_benchmark("--workload I --start-key 0 --keys 20 -o 1")
    # the fixed-value UDF workload should reach all 20 records, setting
    # "bool_bin" to one constant boolean on each of them
    lib.run_benchmark("--duration 1 --workload RUF,0,0 -upn test_module "
                      "-ufn set_bin -ufv \\\"bool_bin\\\",b --start-key 0 --keys 20 -z 1",
                      do_reset=False)
    lib.check_for_range(0, 20, verify)
    # exactly one of the two outcomes may have occurred
    assert ((counts[True] > 0 and counts[False] == 0) or
            (counts[True] == 0 and counts[False] > 0))
def test_linear_write_keys_async():
    """Async insert without --start-key: keys default to starting at 1."""
    lib.run_benchmark("--workload I --keys 100 --async")
    lib.check_for_range(1, 101)
def test_linear_write_start_key_async():
    """Async insert honors an explicit --start-key offset."""
    lib.run_benchmark("--workload I --start-key 1000 --keys 1000 --async")
    lib.check_for_range(1000, 2000)