Example #1
# Test code
import random
import timeit
from cloudburst.client.client import CloudburstConnection
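# Connect to a locally running Cloudburst deployment; the first address is the
# function service and the second is this client's own IP.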
local_cloud = CloudburstConnection('127.0.0.1', '127.0.0.1', local=True)
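# Cloudburst passes a user-library handle as the first argument to every
# function; these lambdas ignore it (the leading `_`).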
sq = lambda _, x: x * x
incr = lambda _, x: x + 1
cloud_sq = local_cloud.register(sq, 'sq')
cloud_incr = local_cloud.register(incr, 'incr')
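# DAG 'f' chains the two registered functions: the output of 'incr' feeds 'sq'.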
local_cloud.register_dag('f', ['incr', 'sq'], [('incr', 'sq')])
rand_seq = random.sample(range(-2**50, 2**50), 100)


def test():
    for x in rand_seq:
        local_cloud.call_dag('f', {'incr': [x]}).get()


for rand_seed in range(100):
    random.seed(rand_seed)
    # Resample the inputs after seeding; otherwise re-seeding has no effect on
    # the sequence generated above.
    rand_seq = random.sample(range(-2**50, 2**50), 100)
    print(timeit.timeit(test, number=10))
Example #2
from cloudburst.client.client import CloudburstConnection

# First argument is the Cloudburst function-service (ELB) address; the second
# is this client's own IP.
cloudburst = CloudburstConnection(
    "a7b4fb6f87473467c86de15406e6a094-2079112079.us-east-1.elb.amazonaws.com",
    "34.239.175.232",
)

cloudburst.list()

import random
import string
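# Random salt so repeated runs register functions and DAGs under fresh names.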
salt = "".join(random.choices(string.ascii_letters, k=6))

print("Running sanity check")
cloud_sq = cloudburst.register(lambda _, x: x * x, "square-2"+salt)
print(cloud_sq(2).get())
cloudburst.delete_dag("dag")
cloudburst.register_dag("dag", ["square-2"+salt], [])
print(cloudburst.call_dag("dag", {"square-2"+salt: [2]}).get())

print("Running example flow")
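# Flow, FlowType, Table, and IntType are not defined in this snippet; they come
# from the Cloudflow dataflow layer built on Cloudburst, and map_fn/filter_fn
# are user-defined operators defined elsewhere.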
dataflow = Flow("example-flow"+salt, FlowType.PUSH, cloudburst)
dataflow.map(map_fn, names=["sum"]).filter(filter_fn)

table = Table([("a", IntType), ("b", IntType)])

table.insert([1, 2])
table.insert([1, 3])
table.insert([1, 4])

dataflow.register()
dataflow.deploy()
Example #3
#!/usr/bin/python3.6

import sys
import torch
from PIL import Image
from cloudburst.client.client import CloudburstConnection
from droplet_modelzoo import Torch_Class

local_cloud = CloudburstConnection('127.0.0.1',
                                   '127.0.0.1',
                                   tid=20,
                                   local=True)
torch_init_arg = [str(sys.argv[1])]
torch_init_arg_two = (torch_init_arg, )
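# Registering a (class, init_args) pair creates a Cloudburst function backed by
# an instance of the class, constructed with init_args on the executor.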
torch_class = local_cloud.register((Torch_Class, torch_init_arg_two),
                                   'torch_class')
local_cloud.register_dag('torch_dag', ['torch_class'], [])
# inp = Image.open("/Users/Avi/cloudburst/" + str(sys.argv[2]))
# print("Torch class incr get: " + str(torch_class(inp).get()))
# print("Torch dag incr get" + str(local_cloud.call_dag('torch_dag', {'torch_class': inp}).get()))
print(str(sys.argv[2]))

# cloudburst = CloudburstConnection('127.0.0.1', '127.0.0.1', local=True)
# incr = lambda _, a: a + 1
# cloud_incr = cloudburst.register(incr, 'incr')
# print(cloud_incr(1).get())
# square = lambda _, a: a * a
# cloud_square = cloudburst.register(square, 'square')
# print(cloud_square(2).get())
Example #4
#!/usr/bin/python3.6

import codecs
import cloudpickle as cp
from cloudburst.client.client import CloudburstConnection

local_cloud = CloudburstConnection('127.0.0.1', '127.0.0.1', local=True)
incr = lambda _, a: a + 1
cloud_incr = local_cloud.register(incr, 'incr')
square = lambda _, a: a * a
cloud_square = local_cloud.register(square, 'square')
local_cloud.register_dag('test_dag', ['incr', 'square'], [('incr', 'square')])
val = local_cloud.call_dag('test_dag', {'incr': [1]}).get()
print(val)
Example #5
# Reconstructed from the truncated listing: gather per-mapper and per-reducer
# outputs from the KVS (the m_res keys are assumed by symmetry with r_res).
def get_runtime_info():
    m_res = [cloudburst_client.get(f'm_res{i}') for i in range(map_num)]
    r_res = []
    for i in range(reduce_num):
        r_res.append(cloudburst_client.get(f'r_res{i}'))
    return m_res, r_res


# input_num = 4
# obj_num_per_reducer = 2
# sort_map(cloudburst_client, input_num * 0, input_num, reduce_num)
# sort_map(cloudburst_client, input_num * 1, input_num, reduce_num)
# sort_reduce(cloudburst_client, 0, map_num, obj_num_per_reducer)
# sort_reduce(cloudburst_client, 1, map_num, obj_num_per_reducer)
# print(get_runtime_info())
# exit(0)

# Each stage is registered as its own DAG with no edges, so all mapper
# functions run in parallel under 'map' and all reducers under 'reduce'.
cloudburst_client.register_dag('map', map_names, [])
cloudburst_client.register_dag('reduce', reduce_names, [])


def exec_one():
    # Time the map stage end to end.
    mstart = time.time()
    m_res = cloudburst_client.call_dag('map', map_args).get()
    mtime = time.time() - mstart

    # Time the reduce stage end to end.
    rstart = time.time()
    r_res = cloudburst_client.call_dag('reduce', reduce_args).get()
    rtime = time.time() - rstart

    # Give executors a moment to write their outputs, then pull per-task
    # results back from the KVS.
    time.sleep(2)
    m_res, r_res = get_runtime_info()