def _get_or_register_function(self, func_name, func):
    """Look up *func_name* on the Cloudburst cluster, registering *func*
    under that name if it is not already present.

    Returns the cloud-side function handle; raises if registration fails.
    """
    client = CloudburstConnection(self.cloudburst_addr, self.caller_ip)

    existing = client.get_function(func_name)
    if existing is not None:
        return existing

    # print("Function '{}' not found in Hydro. Registering..."
    #       .format(func_name))
    # Hack to get cloudpickle to serialize the whole function.
    func.__module__ = '__main__'
    registered = client.register(func, func_name)
    if not registered:
        raise Exception("Function registration failed.")
    return registered
# Test code: micro-benchmark a composed Cloudburst function on random inputs.
import random
import timeit
from cloudburst.client.client import CloudburstConnection

local_cloud = CloudburstConnection('127.0.0.1', '127.0.0.1', local=True)

# Cloudburst user functions receive an execution context as their first
# argument; these test functions ignore it.
def sq(_, x):
    return x * x

def incr(_, x):
    return x + 1

cloud_sq = local_cloud.register(sq, 'sq')
cloud_incr = local_cloud.register(incr, 'incr')

# Composition of the two functions above, registered as its own function.
def f(_, x):
    return sq(None, incr(None, x))

cloud_f = local_cloud.register(f, 'f')

# 100 random integers drawn once, reused by every timing run below.
rand_seq = random.sample(range(-2**50, 2**50), 100)

def test():
    for x in rand_seq:
        cloud_f(x).get()

# NOTE(review): rand_seq is sampled once above, so re-seeding here does not
# change the inputs — every iteration times the same sequence. Confirm
# whether re-sampling per seed was intended.
for rand_seed in range(100):
    random.seed(rand_seed)
    print(timeit.timeit(test, number=10))
# NOTE(review): orphaned fragment — this `return` is the tail of a filter
# predicate (presumably filter_fn) whose `def` line is outside this chunk.
return row["sum"] % 2 == 0

# Connect to a remote Cloudburst deployment (ELB address + caller IP).
cloudburst = CloudburstConnection(
    "a7b4fb6f87473467c86de15406e6a094-2079112079.us-east-1.elb.amazonaws.com",
    "34.239.175.232",
)
cloudburst.list()

import random
import string

# Random suffix so repeated runs do not collide with previously registered
# function/DAG names on the shared cluster.
salt = "".join(random.choices(string.ascii_letters, k=6))

print("Running sanity check")
cloud_sq = cloudburst.register(lambda _, x: x * x, "square-2"+salt)
print(cloud_sq(2).get())
cloudburst.delete_dag("dag")
cloudburst.register_dag("dag", ["square-2"+salt], [])
print(cloudburst.call_dag("dag", {"square-2"+salt: [2]}).get())
# 1 / 0
print("Running example flow")
# NOTE(review): Flow, FlowType, Table, IntType, map_fn and filter_fn are not
# defined in this chunk — presumably imported/defined earlier in the file.
dataflow = Flow("example-flow"+salt, FlowType.PUSH, cloudburst)
dataflow.map(map_fn, names=["sum"]).filter(filter_fn)
table = Table([("a", IntType), ("b", IntType)])
table.insert([1, 2])
table.insert([1, 3])
table.insert([1, 4])
# Test: pass one Cloudburst function's future as an argument to another.
from cloudburst.client.client import CloudburstConnection

local_cloud = CloudburstConnection('127.0.0.1', '127.0.0.1', local=True)

def _squares(_, i):
    return [x * x for x in range(i)]

make_list = local_cloud.register(_squares, 'make_list')
list_5 = make_list(5)

def _nth(_, l, i):
    return l[i]

take_elem = local_cloud.register(_nth, 'take_elem')
# breakpoint()
# list_5 is a future, handed directly to the second function call.
res = take_elem(list_5, 3)
print(res)
#!/usr/bin/python3.6
# Smoke test: wire 'incr' into 'square' as a two-node Cloudburst DAG.
import codecs
import cloudpickle as cp
from cloudburst.client.client import CloudburstConnection

local_cloud = CloudburstConnection('127.0.0.1', '127.0.0.1', local=True)

def incr(_, a):
    return a + 1

def square(_, a):
    return a * a

cloud_incr = local_cloud.register(incr, 'incr')
cloud_square = local_cloud.register(square, 'square')

# The edge ('incr', 'square') feeds incr's output into square.
local_cloud.register_dag('test_dag', ['incr', 'square'], [('incr', 'square')])
val = local_cloud.call_dag('test_dag', {
    'incr': 1
}).get()
print(val)
#!/usr/bin/python3.6
# Register a Torch model class with a local Cloudburst and build a
# single-node DAG around it. argv[1] is the model path, argv[2] an image path.
import sys

import torch
from PIL import Image
from cloudburst.client.client import CloudburstConnection
from droplet_modelzoo import Torch_Class

local_cloud = CloudburstConnection('127.0.0.1', '127.0.0.1', tid=20, local=True)

# register() is given a (class, init_args) pair here; the init args appear to
# need an extra tuple wrapper around the argument list.
model_args = [str(sys.argv[1])]
wrapped_args = (model_args, )
torch_class = local_cloud.register((Torch_Class, wrapped_args), 'torch_class')
local_cloud.register_dag('torch_dag', ['torch_class'], [])

# inp = Image.open("/Users/Avi/cloudburst/" + str(sys.argv[2]))
# print("Torch class incr get: " + str(torch_class(inp).get()))
# print("Torch dag incr get" + str(local_cloud.call_dag('torch_dag', {'torch_class': inp}).get()))
print(str(sys.argv[2]))

# cloudburst = CloudburstConnection('127.0.0.1', '127.0.0.1', local=True)
# incr = lambda _, a: a + 1
# cloud_incr = cloudburst.register(incr, 'incr')
# print(cloud_incr(1).get())
# square = lambda _, a: a * a
# cloud_square = cloudburst.register(square, 'square')
# print(cloud_square(2).get())
# NOTE(review): fragment — key_num, cloudburst_client, sort_map and
# sort_reduce are defined earlier in the original file, outside this chunk.
map_num = reduce_num = 4
input_num = int(key_num / map_num)

# Per-mapper args: presumably [start offset, input count, reducer fan-out].
map_args = {}
for i in range(map_num):
    map_args[f'mapper{i}'] = [input_num * i, input_num, reduce_num]

obj_num_per_reducer = int(input_num / reduce_num)
# Per-reducer args: presumably [reducer index, mapper fan-in, objects each].
reduce_args = {}
for i in range(reduce_num):
    reduce_args[f'reducer{i}'] = [i, map_num, obj_num_per_reducer]

# Register one copy of the sort mapper under each generated mapper name.
map_names = []
for i in range(map_num):
    name = f'mapper{i}'
    map_func = cloudburst_client.register(sort_map, name)
    print(f'register {name}')
    map_names.append(name)

# Same for the reducers.
reduce_names = []
for i in range(reduce_num):
    name = f'reducer{i}'
    reduce_func = cloudburst_client.register(sort_reduce, name)
    print(f'register {name}')
    reduce_names.append(name)

def get_runtime_info():
    # Fetch mapper results stored under keys m_res0..m_res{map_num-1}.
    # NOTE(review): body appears truncated at the chunk boundary.
    m_res = []
    for i in range(map_num):
        m_res.append(cloudburst_client.get(f'm_res{i}'))
# NOTE(review): fragment — the lines down to `return result, thread_id` are
# the tail of a function (presumably mpl_anna) whose `def` line is outside
# this chunk; time_limit, anna_routing_address, local_ip, execution_id,
# thread_id and dc are defined there or earlier in the original file.

#execution_command = '/mplambda/build/mpl_lambda_pseudo --scenario se3 --algorithm cforest --coordinator "$COORDINATOR" --jobs 10 --env se3/Home_env.dae --robot se3/Home_robot.dae --start 0,1,0,0,252.95,-214.95,46.19 --goal 0,1,0,0,262.95,75.05,46.19 --min -383.802642822,-371.469055176,-0.196851730347 --max 324.997131348,337.893371582,142.332290649 --time-limit ' + str(time_limit) + ' --check-resolution 0.01 --anna_address ' + anna_routing_address + ' --local_ip ' + local_ip + ' --execution_id ' + execution_id + ' --thread_id ' + thread_id + ' 2>/logs_{}_{}'.format(execution_id, str(time.time()))
# cubicle
#execution_command = '/mplambda/build/mpl_lambda_pseudo --scenario se3 --algorithm cforest --coordinator "$COORDINATOR" --jobs 10 --env resources/se3/cubicles_env.dae --robot resources/se3/cubicles_robot.dae --start 0,1,0,0,-4.96,-40.62,70.57 --goal 0,1,0,0,200.0,-40.62,70.57 --min -508.88,-230.13,-123.75 --max 319.62,531.87,101.0 --time-limit ' + str(time_limit) + ' --check-resolution 0.01 --anna_address ' + anna_routing_address + ' --local_ip ' + local_ip + ' --execution_id ' + execution_id + ' --thread_id ' + thread_id + ' 2>/logs_{}_{}'.format(execution_id, str(time.time()))

# NOTE(review): no live assignment to execution_command is visible in this
# chunk (both candidates above are commented out) — the active assignment
# must precede this fragment; confirm in the full file.
print(execution_command)
#execution_command = "echo hello world" + '-anna_address ' + anna_routing_address + ' --local_ip ' + local_ip + ' --execution_id ' + execution_id + ' --thread_id ' + thread_id + ' >/logs'

# shell=True: the full command string is interpreted by a shell (needed for
# the "$COORDINATOR" expansion and the 2>/logs_... redirection).
result = subprocess.run([execution_command], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
print(result)
return result, thread_id

cloud_func = dc.register(mpl_anna, 'twisty_anna')
# wait for 1 second for the registration process to fully finish
time.sleep(1)
# f = open("result_" + str(time.time()) + ".txt", "w")
f = open("result_" + "k_20_l_8_t_1" + ".txt", "w")
run = 0
# run the same experiment 24 times and take average
while run < 60:
    time.sleep(1)
    print("alive")
    run += 1
    f.write('T' + str(run) + '\n')
    print('T' + str(run))
    from random import random
    # NOTE(review): the next statement is missing its closing parenthesis —
    # truncated at the chunk boundary or a genuine syntax error; verify
    # against the full file before changing.
    solution_key = ('solution_key_' + str(time.time()) + str(random())
execution_command = '/hydro/mplambda/build/mpl_lambda_pseudo --scenario fetch --algorithm cforest --coordinator "$COORDINATOR" --jobs 10 --env AUTOLAB.dae --env-frame=0.38,-0.90,0.00,0,0,-$PI_2 --goal=-1.07,0.16,0.88,0,0,0 --goal-radius=0.01,0.01,0.01,0.01,0.01,$PI --start=0.1,$PI_2,$PI_2,0,$PI_2,0,$PI_2,0 --time-limit 60 --check-resolution 0.01 --anna_address ' + anna_routing_address + ' --local_ip ' + local_ip + ' --execution_id ' + execution_id + ' --thread_id ' + thread_id result = subprocess.run([execution_command], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True) print("result", result, "111") print("thread_id", thread_id, "222") log.info("result" + result + "333333") log.info("thread_id" + thread_id + "555555") return result, thread_id cloud_func = dc.register(mpl_anna, 'mpl_anna') # wait for 1 second for the registration process to fully finish time.sleep(1) f = open("result.txt", "w") run = 0 # run the same experiment 24 times and take average while run <= 1: time.sleep(1) run += 1 f.write('T' + str(run) + '\n') print('T' + str(run)) solution_key = ('solution_key_' + str(run)) # deferentiate keys future_list = [] for i in range( 2