Example #1
# Assumes `resnet_predict` (the model function) and `resnet101` (the torchvision
# model) are defined elsewhere in this file.
from clipper_admin import ClipperConnection, DockerContainerManager
from clipper_admin.deployers import pytorch as pytorch_deployer


def setup_clipper():
  app_name = 'resnet101-app'
  model_name = 'resnet101-model'
  clipper_conn = ClipperConnection(DockerContainerManager())
  clipper_conn.connect()
  
  pytorch_deployer.deploy_pytorch_model(clipper_conn=clipper_conn,
          name=model_name,
          version='1',
          input_type='bytes',
          func=resnet_predict,
          pytorch_model=resnet101,
          pkgs_to_install=['pillow', 'torch', 'torchvision'])

  clipper_conn.register_application(name=app_name,
          input_type="bytes",
          default_output="-1.0",
          slo_micros=10000000)  # 10s

  clipper_conn.link_model_to_app(app_name=app_name, model_name=model_name)
  print("query_adress: ", clipper_conn.get_query_addr())
  print("app_name: ", )
  print("model_name: ", )
  print("url: ", "http://{addr}/{app_name}/predict".format(addr=clipper_conn.get_query_addr(),app_name=app_name))
Example #2
from clipper_admin import ClipperConnection, DockerContainerManager
from clipper_admin.deployers import python as py_deployer
import random

cl = ClipperConnection(DockerContainerManager())
cl.stop_all()
cl.start_clipper()

# cl.register_application(name="pong", input_type="doubles", slo_micros=1000000, default_output="1")


def random_predict(xs):
    action = random.randint(0, 2)
    return [str(action) for _ in xs]


# cl.register_application(name="pong", input_type="doubles", default_output="-1.0", slo_micros=100000)

# py_deployer.deploy_python_closure(cl, name="rand-model", version=1, input_type="doubles", func=random_predict, registry="hsubbaraj")

# cl.link_model_to_app(app_name="pong", model_name="rand-model")

py_deployer.create_endpoint(cl,
                            name="pong",
                            input_type="doubles",
                            func=random_predict,
                            default_output="0",
                            slo_micros=100000)

print("CLIPPER ADDRESS: " + cl.get_query_addr())
Example #3
from clipper_admin import ClipperConnection, DockerContainerManager
from clipper_admin.deployers.pyspark import deploy_pyspark_model
from clipper_admin.deployers import python as python_deployer

clipper_conn = ClipperConnection(DockerContainerManager())
clipper_conn.start_clipper()
clipper_addr = clipper_conn.get_query_addr()

def preprocess(inputs):
    inputArr = (inputs[0]).split(",")
    floats = inputArr[:-1]
    rounded = [round(float(i), 1) for i in floats]
    rounded.append(inputArr[-1])
    output = [(str(rounded))[1:-1]]
    return output

python_deployer.deploy_python_closure(
    clipper_conn,
    name="process-iris",  # The name of the model in Clipper
    version=1,  # A unique identifier to assign to this model.
    input_type="string",  # The type of data the model function expects as input
    func=preprocess # The model function to deploy
)

clipper_conn.register_application(
    name="process-app",
    input_type="strings",
    default_output="-1",
    slo_micros=9000000)  # the default output is returned if the 9-second SLO is missed

clipper_conn.link_model_to_app(app_name="process-app", model_name="process-iris")
Example #4
    return [size]


# Stop Clipper on Ctrl-C
def signal_handler(signal, frame):
    print("Stopping Clipper...")
    clipper_conn = ClipperConnection(DockerContainerManager())
    clipper_conn.stop_all()
    sys.exit(0)


if __name__ == '__main__':
    signal.signal(signal.SIGINT, signal_handler)

    parser = argparse.ArgumentParser(
        description='Use Clipper to Query Images.')
    parser.add_argument('image', nargs='+', help='Path to an image')
    imgs = parser.parse_args().image

    clipper_conn = ClipperConnection(DockerContainerManager())
    clipper_conn.start_clipper()
    python_deployer.create_endpoint(clipper_conn, "image-example", "bytes",
                                    image_size)
    time.sleep(2)
    try:
        for f in imgs:
            if f.endswith('.jpg') or f.endswith('.png'):
                predict(clipper_conn.get_query_addr(), f)
    except Exception as e:
        clipper_conn.stop_all()
Example #5
    elif i % 5 == 0:
        return produce_query_arr_for_ms(100)
    elif i % 3 == 0:
        return produce_query_arr_for_ms(50)
    else:
        return produce_query_arr_for_ms(10)


if __name__ == '__main__':
    signal.signal(signal.SIGINT, signal_handler)
    clipper_conn = ClipperConnection(DockerContainerManager())
    clipper_conn.start_clipper()
    print("Starting Clipper")
    python_deployer.create_endpoint(clipper_conn,
                                    "simple-example",
                                    "doubles",
                                    feature_sum,
                                    num_replicas=2)
    time.sleep(2)
    print("Starting Prediction")

    try:
        counter = 0
        while True:
            print(counter)
            predict(clipper_conn.get_query_addr(), fizz_buzz(counter))
            counter += 1
            time.sleep(0.2)
    except Exception as e:
        clipper_conn.stop_all()
Example #6
#setup Clipper connection
clipper_conn = ClipperConnection(
    KubernetesContainerManager(useInternalIP=True))
clipper_conn.connect()

batch_size = 2
# batches = np.array([])
# times = np.array([])
for i in range(20):
    # batch_size = np.random.randint(5, high=50)
    print("request " + str(i))
    if batch_size > 1:
        input_list = [
            Lineage(np.random.random_sample()) for i in range(batch_size)
        ]
        out_lin_1 = predict(clipper_conn.get_query_addr(),
                            "lineage1",
                            input_list,
                            batch=True)
        print(out_lin_1)
        out_lin_2 = predict(clipper_conn.get_query_addr(),
                            "lineage2",
                            out_lin_1,
                            batch=True)
        print(out_lin_2)
    else:
        input_lin = Lineage(np.random.random_sample())
        out_lin_1 = predict(clipper_conn.get_query_addr(), "lineage1",
                            input_lin)
        print(out_lin_1)
        out_lin_2 = predict(clipper_conn.get_query_addr(), "lineage2",
                            out_lin_1)
        print(out_lin_2)
Example #7
    logger.info("Start Metric Test (0/1): Running 2 Replicas")
    clipper_conn = ClipperConnection(DockerContainerManager(redis_port=6380))
    clipper_conn.start_clipper()
    python_deployer.create_endpoint(clipper_conn,
                                    "simple-example",
                                    "doubles",
                                    feature_sum,
                                    num_replicas=2)
    time.sleep(2)
    try:
        logger.info(
            "Making 100 predictions using two model containers; should take 25 seconds."
        )
        for _ in range(100):
            predict(clipper_conn.get_query_addr(), np.random.random(200))
            time.sleep(0.2)

        logger.info("Test 1: Checking status of 3 node exporter")
        check_target_health(metric_addr)
        logger.info("Test 1 Passed")

        logger.info("Test 2: Checking Model Container Metrics")
        conf = get_metrics_config()
        conf = conf['Model Container']
        prefix = 'clipper_{}_'.format(conf.pop('prefix'))
        for name, spec in conf.items():
            name = prefix + name
            if spec['type'] == 'Histogram' or spec['type'] == 'Summary':
                name += '_sum'
Example #8
import io
import os
import tempfile

import PIL.Image


def image_size(imgs):
    # Compute the (width, height) of each input image, passed in as raw bytes
    sizes = []
    for i in range(len(imgs)):
        tmp = tempfile.NamedTemporaryFile('wb', delete=False, suffix='.png')
        tmp.write(io.BytesIO(imgs[i]).getvalue())
        tmp.close()

        # Use PIL to read in the file and compute size
        size = PIL.Image.open(tmp.name, 'r').size

        # Remove the temp file
        os.unlink(tmp.name)

        sizes.append(size)
    return sizes


from clipper_admin import ClipperConnection, DockerContainerManager
from clipper_admin.deployers import python as python_deployer

clipper_conn = ClipperConnection(DockerContainerManager())
clipper_conn.start_clipper()

python_deployer.create_endpoint(clipper_conn=clipper_conn,
                                name="image-size",
                                input_type="bytes",
                                func=image_size,
                                pkgs_to_install=['pillow'])

print('clipper_conn.get_query_addr(): ' + clipper_conn.get_query_addr())

# To test
# query(clipper_conn.get_query_addr(), 'imgs/clipper-logo.png')
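
# The query helper referenced in the comment above is not defined in this
# snippet; a minimal sketch under the same assumptions (bytes input,
# base64-encoded image posted to the "image-size" endpoint) could look like
# this.
import base64


def query(addr, filename):
    url = "http://%s/image-size/predict" % addr
    req_json = json.dumps({
        "input": base64.b64encode(open(filename, "rb").read()).decode()
    })
    r = requests.post(url,
                      headers={'Content-type': 'application/json'},
                      data=req_json)
    print(r.json())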
Example #9
deploy_pytorch_model(
    clipper_conn,
    name="superresolution-model",
    version=1,
    input_type="bytes",
    func=image_enhance,
    pytorch_model=model,
    base_image='custom-model-image',
    pkgs_to_install=['opencv-python', 'numpy', 'six', 'Pillow', 'wheel'])

print("linking model to app...")

clipper_conn.link_model_to_app(app_name="superresolution", model_name="superresolution-model")

def query(addr, filename):
    url = "http://%s/superresolution/predict" % addr
    req_json = json.dumps({
        "input":
        base64.b64encode(open(filename, "rb").read()).decode() # bytes to unicode
    })
    headers = {'Content-type': 'application/json'}
    r = requests.post(url, headers=headers, data=req_json)
    print(r.json())


print("deployed... do a query")

query(clipper_conn.get_query_addr(), 'LR/baboon.png')
Example #10
	return Lineage.add_node(input_lin, model_name, json.loads(r.text)["output"][0])


#setup Clipper connection
clipper_conn = ClipperConnection(KubernetesContainerManager(useInternalIP=True))
clipper_conn.connect()

batch_size = 1
# batches = np.array([])
# times = np.array([])
for i in range(20):
	# batch_size = np.random.randint(5, high=50)
	print("request " + str(i))
	if batch_size > 1:
		input_list = [Lineage(np.random.random_sample()) for i in range(batch_size)]
		out_json = predict(clipper_conn.get_query_addr(), "lineage1", input_list, batch=True)
		# print(json.loads(output_1)["output"][0])
	else:
		input_lin = Lineage(np.random.random_sample())
		out_lin_1 = predict(clipper_conn.get_query_addr(), "lineage1", input_lin)
		print(out_lin_1)
		out_lin_2 = predict(clipper_conn.get_query_addr(), "lineage2", out_lin_1)
		print(out_lin_2)
		print(out_lin_1)
print("finished running clipper")





Example #11
#setup Clipper connection
clipper_conn = ClipperConnection(KubernetesContainerManager(useInternalIP=True))
clipper_conn.connect()

batch_size = 2

dataiter = iter(testloader)
# image = dataiter.next()
# print(image)
# print(image[0].data.numpy().tolist())
# print(type(image[0].data.numpy().tolist()))
for i in range(10):
	print("request " + str(i))
	if batch_size > 1:
		input_list = []
		for j in range(batch_size):
			image = dataiter.next()
			input_list += image[0].data.numpy().flatten().tolist()
		# print("input list")
		# print(type(input_list))
		# print(len(input_list))
		predict(
			clipper_conn.get_query_addr(),
			input_list,
			batch=True)
	else:
		image = dataiter.next()
		predict(clipper_conn.get_query_addr(), image[0].data.numpy().flatten().tolist())
	time.sleep(0.2)
print("finished running clipper")
    print("Stopping Clipper...")
    clipper_conn = ClipperConnection(DockerContainerManager())
    clipper_conn.stop_all()
    sys.exit(0)


if __name__ == '__main__':
    signal.signal(signal.SIGINT, signal_handler)

    parser = argparse.ArgumentParser(
        description='Use Clipper to Query Images.')
    parser.add_argument('image', nargs='+', help='Path to an image')
    imgs = parser.parse_args().image

    clipper_conn = ClipperConnection(DockerContainerManager())
    clipper_conn.start_clipper()
    python_deployer.create_endpoint(clipper_conn=clipper_conn,
                                    name="image-example",
                                    input_type="bytes",
                                    func=image_size,
                                    pkgs_to_install=['pillow'])
    time.sleep(2)
    try:
        for f in imgs:
            if f.endswith('.jpg') or f.endswith('.png'):
                query(clipper_conn.get_query_addr(), f)
    except Exception as e:
        print("exception: {}".format(e))
    clipper_conn.get_clipper_logs()
    clipper_conn.stop_all()