from cloudburst.client.client import CloudburstConnection

import random
import string

# Flow, FlowType, Table, IntType, map_fn and filter_fn are assumed to be
# imported/defined earlier in this script (dataflow API).

cloudburst = CloudburstConnection(
    "a7b4fb6f87473467c86de15406e6a094-2079112079.us-east-1.elb.amazonaws.com",
    "34.239.175.232",
)
cloudburst.list()

# Salt registered names so repeated runs do not collide with earlier registrations.
salt = "".join(random.choices(string.ascii_letters, k=6))

print("Running sanity check")
cloud_sq = cloudburst.register(lambda _, x: x * x, "square-2" + salt)
print(cloud_sq(2).get())

cloudburst.delete_dag("dag")
cloudburst.register_dag("dag", ["square-2" + salt], [])
print(cloudburst.call_dag("dag", {"square-2" + salt: [2]}).get())
# 1 / 0

print("Running example flow")
dataflow = Flow("example-flow" + salt, FlowType.PUSH, cloudburst)
dataflow.map(map_fn, names=["sum"]).filter(filter_fn)

table = Table([("a", IntType), ("b", IntType)])
table.insert([1, 2])
table.insert([1, 3])
table.insert([1, 4])

dataflow.register()
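# Hedged sketch (not part of the original script): the sanity check above only
# registers a single-function DAG. Assuming register_dag takes its connections
# as a list of (source, destination) pairs, a chained two-function DAG can be
# exercised the same way. The "incr-", "square-chain-" and "chain-" names
# below are made up for this illustration.
cloud_incr = cloudburst.register(lambda _, x: x + 1, "incr-" + salt)
cloud_sq_chain = cloudburst.register(lambda _, x: x * x, "square-chain-" + salt)
cloudburst.register_dag(
    "chain-" + salt,
    ["incr-" + salt, "square-chain-" + salt],
    [("incr-" + salt, "square-chain-" + salt)],
)
# Expect (1 + 1) ** 2 == 4 from the sink function of the chain.
print(cloudburst.call_dag("chain-" + salt, {"incr-" + salt: [1]}).get())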
if len(sys.argv) > 3:
    max_workers = int(sys.argv[3])
if len(sys.argv) > 4:
    sequence_exec = sys.argv[4] == '0'

num_requests = 100 * max_workers

f_elb = 'a8576b08f54cb4c0ebefb5f79a448438-342174087.us-east-1.elb.amazonaws.com'
my_ip = '34.204.195.6'
cloudburst_client = CloudburstConnection(f_elb, my_ip, tid=0, local=False)

if chosen_test == 'clean':
    # Delete every registered DAG and exit.
    all_func = cloudburst_client.list()
    for f in all_func:
        suc, err = cloudburst_client.delete_dag(f)
        print(f'Delete {f} {suc}')
    exit(0)


def write_casual_test(cloudburst, size, key_num):
    exe_id = cloudburst.getid()
    # Serialize one random array as a multi-key causal lattice and write it
    # under key_num keys via causal_put, timing the writes.
    new_v = np.random.random(size)
    result = serializer.dump_lattice(new_v, MultiKeyCausalLattice)
    keys = ['v' + str(i) for i in range(key_num)]
    start = time.time()
    res = [cloudburst.anna_client.causal_put(key, result, 0) for key in keys]
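# Hedged sketch (not in the original benchmark): a hypothetical helper for
# summarizing the per-request latencies this script collects (num_requests is
# sized as 100 * max_workers above). Only numpy, already used by the script,
# is assumed.
def print_latency_stats(label, latencies):
    arr = np.array(latencies)
    print(f'{label}: n={len(arr)} mean={arr.mean():.4f}s '
          f'p50={np.percentile(arr, 50):.4f}s '
          f'p95={np.percentile(arr, 95):.4f}s '
          f'p99={np.percentile(arr, 99):.4f}s')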
from cloudburst.shared.reference import CloudburstReference

logging.basicConfig(stream=sys.stdout, level=logging.INFO)

if len(sys.argv) < 2:
    print('Usage: ./sort_test.py {workload}')
    exit(1)

f_elb = 'acca9d681767a4ce5b29cf0b44bc287b-2030441321.us-east-1.elb.amazonaws.com'
my_ip = '34.204.195.6'
cloudburst_client = CloudburstConnection(f_elb, my_ip, tid=0, local=False)

chosen_test = sys.argv[1]

if chosen_test == 'clean':
    # Drop the previously registered sort DAGs and exit.
    suc, err = cloudburst_client.delete_dag('map')
    print(f'Delete map {suc}. {err}')
    suc, err = cloudburst_client.delete_dag('reduce')
    print(f'Delete reduce {suc}. {err}')
    exit(0)


def prepare_sort_data():
    if FORCE_GEN_DATA == 0:
        # Split the o-prefixed input keys into batch_size batches of
        # key_num_in_batch keys each.
        key_num_in_batch = int(key_num / batch_size)
        for i in range(batch_size):
            val = np.random.random(OSIZE)
            keys = [
                f'o{k}'
                for k in range(i * key_num_in_batch,
                               i * key_num_in_batch + key_num_in_batch)