def create_kubernetes_connection(cleanup=True, start_clipper=True, connect=True, with_proxy=False, num_frontend_replicas=1):
    """Create a ClipperConnection backed by a KubernetesContainerManager.

    Parameters
    ----------
    cleanup : bool
        If True, stop any existing Clipper deployment first and wait for
        Kubernetes to finish tearing it down.
    start_clipper : bool
        If True, start a fresh Clipper cluster using the pinned ECR images.
    connect : bool
        If True, attempt to connect to the cluster; connection failures are
        logged but never raised (best effort).
    with_proxy : bool
        If True, talk to the cluster through a local kubectl proxy on
        127.0.0.1:8080.
    num_frontend_replicas : int
        Number of query-frontend replicas to start.

    Returns
    -------
    ClipperConnection
        The (possibly connected) connection object.
    """
    logger.info("Creating KubernetesContainerManager")
    if with_proxy:
        cm = KubernetesContainerManager(kubernetes_proxy_addr="127.0.0.1:8080")
    else:
        cm = KubernetesContainerManager()
    cl = ClipperConnection(cm)
    if cleanup:
        cl.stop_all()
        # Give kubernetes some time to clean up
        time.sleep(20)
        logger.info("Done cleaning up clipper")
    if start_clipper:
        logger.info("Starting Clipper")
        cl.start_clipper(
            query_frontend_image=
            "568959175238.dkr.ecr.us-west-1.amazonaws.com/clipper/query_frontend:{}".
            format(clipper_version),
            mgmt_frontend_image=
            "568959175238.dkr.ecr.us-west-1.amazonaws.com/clipper/management_frontend:{}".
            format(clipper_version),
            num_frontend_replicas=num_frontend_replicas)
        time.sleep(1)
    if connect:
        try:
            cl.connect()
        except ClipperException:
            # BUG FIX: this clause was listed AFTER a broad `except Exception`
            # and was therefore unreachable dead code (ClipperException is a
            # subclass of Exception). Connect stays best-effort, but failures
            # are now logged instead of silently swallowed.
            logger.exception("Clipper error while connecting; continuing")
        except Exception:
            logger.exception("Unexpected error while connecting; continuing")
    return cl
def create_kubernetes_connection(cleanup=False, start_clipper=False, connect=False, with_proxy=False, num_frontend_replicas=1, cleanup_name='default-cluster', new_name='default-cluster', connect_name='default-cluster', service_types=None, namespace='default'):
    """Create a ClipperConnection for a named Kubernetes-managed cluster.

    At least one of ``cleanup``, ``start_clipper``, ``connect`` must be True.
    Each enabled phase builds its own KubernetesContainerManager so the three
    phases can target different cluster names.

    Parameters
    ----------
    cleanup : bool
        Stop all Clipper components of the cluster named ``cleanup_name``.
    start_clipper : bool
        Start a new cluster named ``new_name`` in ``namespace`` (the
        namespace is created if it does not exist).
    connect : bool
        Best-effort connect to the cluster named ``connect_name``;
        failures are logged, not raised.
    with_proxy : bool
        Route traffic through a local kubectl proxy on 127.0.0.1:8080.
    num_frontend_replicas : int
        Number of query-frontend replicas for a newly started cluster.
    cleanup_name, new_name, connect_name : str
        Cluster names for the respective phases.
    service_types : dict or None
        Passed through to KubernetesContainerManager.
    namespace : str
        Kubernetes namespace for a newly started cluster.

    Returns
    -------
    ClipperConnection or None
        The connection from the last phase that ran.

    Raises
    ------
    ValueError
        If no phase flag is set.
    """
    logger.info("Creating KubernetesContainerManager")
    cl = None
    # BUG FIX: was an `assert`, which is silently stripped under `python -O`;
    # input validation must raise explicitly.
    if not (cleanup or start_clipper or connect):
        raise ValueError(
            "You must set at least one of {cleanup, start_clipper, connect} to be true.")
    if with_proxy:
        kubernetes_proxy_addr = "127.0.0.1:8080"
    else:
        kubernetes_proxy_addr = None
    if cleanup:
        logger.info("Cleaning up Kubernetes Cluster {}".format(cleanup_name))
        cm = KubernetesContainerManager(
            cluster_name=cleanup_name,
            useInternalIP=USE_MINIKUBE,
            service_types=service_types,
            kubernetes_proxy_addr=kubernetes_proxy_addr)
        cl = ClipperConnection(cm)
        cl.stop_all()
        logger.info("Done cleaning up clipper")
    if start_clipper:
        logger.info("Starting up Kubernetes Cluster {}".format(new_name))
        cm = KubernetesContainerManager(
            cluster_name=new_name,
            kubernetes_proxy_addr=kubernetes_proxy_addr,
            namespace=namespace,
            useInternalIP=USE_MINIKUBE,
            service_types=service_types,
            create_namespace_if_not_exists=True)
        cl = ClipperConnection(cm)
        cl.start_clipper(num_frontend_replicas=num_frontend_replicas)
    if connect:
        try:
            cm = KubernetesContainerManager(
                cluster_name=connect_name,
                useInternalIP=USE_MINIKUBE,
                service_types=service_types,
                kubernetes_proxy_addr=kubernetes_proxy_addr)
            cl = ClipperConnection(cm)
            cl.connect()
        except Exception:
            # BUG FIX: was `pass`, which hid every connect failure. Connect
            # stays best-effort, but the failure is now logged.
            logger.exception(
                "Failed to connect to cluster {}".format(connect_name))
    return cl
def signal_handler(signal, frame):
    """Interrupt handler: tear down the Clipper deployment, then exit 0."""
    print("Stopping Clipper...")
    conn = ClipperConnection(KubernetesContainerManager(useInternalIP=True))
    conn.stop_all()
    sys.exit(0)
def main(version, label, registry='localhost:5000'):
    """Deploy the ``feature_sum`` closure as model "sum-model" on Clipper.

    Parameters
    ----------
    version : str or int
        Model version to register.
    label : str
        A single label attached to the deployed model.
    registry : str, optional
        Docker registry the model image is pushed to. Generalized from the
        previously hard-coded 'localhost:5000' (same default, so existing
        callers are unaffected).
    """
    from clipper_admin import ClipperConnection, KubernetesContainerManager
    clipper_conn = ClipperConnection(
        KubernetesContainerManager(useInternalIP=True))
    clipper_conn.connect()
    from clipper_admin.deployers import python as python_deployer
    python_deployer.deploy_python_closure(clipper_conn,
                                          name="sum-model",
                                          version=version,
                                          input_type="doubles",
                                          func=feature_sum,
                                          labels=[label],
                                          registry=registry)
# turn into predictions with open('submission.csv') as fh: lines = fh.readlines()[1:] # ignore first line preds = [line.strip().split(',')[1] for line in lines] return preds # pickle function and write to appropriate location s = six.StringIO() c = CloudPickler(s, 2) c.dump(libffm) serialized_prediction_function = s.getvalue() filepath = 'docker/lib/func.pkl' with open(filepath, 'w') as fh: fh.write(serialized_prediction_function) # refresh creds os.system('gcloud container clusters get-credentials redis-cluster') os.system('kubectl cluster-info') clipper_conn = ClipperConnection(KubernetesContainerManager(clipper_ip, useInternalIP=True)) clipper_conn.connect() # Build model and deploy to clipper version = int(time.time()) clipper_conn.build_and_deploy_model('ffm', version, 'strings', 'docker/lib', 'clipper/python-closure-container:develop', container_registry='ryanhoque') # Uncomment the following if first time #clipper_conn.link_model_to_app(app_name="testbed", model_name="ffm") # finally deploy new version of model to clipper (set version as timestamp) print('Successfully deployed model ffm version ' + str(version) + ' to Clipper.')
# Restore the trained TensorFlow model from its checkpoint and look up the
# graph tensors needed for serving.
# NOTE(review): `saver`, `sess`, and `graph` are created earlier in this
# file, outside the visible chunk — confirm against the full script.
saver.restore(sess, './model.ckpt')
load_infer_op = graph.get_tensor_by_name('probabilities:0')
accuracy_op = graph.get_tensor_by_name('Mean_1:0')
# oX / oY: input and label placeholders (TF auto-generated tensor names).
oX = graph.get_tensor_by_name('Placeholder:0')
oY = graph.get_tensor_by_name('Placeholder_1:0')


def predict(X):
    # Run the restored graph on the input batch and convert each element of
    # the result to str (one string per input example).
    print("inputs {}".format(X))
    result = sess.run(load_infer_op, feed_dict={oX: X})
    ret = [str(i) for i in result]
    print("return is {}".format(ret))
    return ret


# Connect to the running Clipper cluster through the kubectl proxy address
# and deploy `predict` as a TensorFlow model.
manager = KubernetesContainerManager(kubernetes_proxy_addr=K8S_ADDR,
                                     namespace=K8S_NS)
clipper_conn = ClipperConnection(manager)
clipper_conn.connect()
# clipper_conn.delete_application(APP_NAME)
# clipper_conn.register_application(
#     name = APP_NAME, input_type = 'doubles', default_output = '0', slo_micros = 100000000)
deploy_tensorflow_model(clipper_conn,
                        name=PREDICT_NAME,
                        version=VERSION,
                        input_type="doubles",
                        func=predict,
                        tf_sess_or_saved_model_path=sess,
                        registry=REGISTRY,
                        pkgs_to_install=['tensorflow'])
import argparse
from clipper_admin import ClipperConnection, KubernetesContainerManager

# Talk to the cluster through a local `kubectl proxy` in the `mdt` namespace.
manager = KubernetesContainerManager(
    kubernetes_proxy_addr='127.0.0.1:8001', namespace='mdt')
clipper_conn = ClipperConnection(manager)


def deploy():
    """Start the Clipper cluster."""
    clipper_conn.start_clipper()


def undeploy():
    """Tear down the Clipper deployment."""
    clipper_conn.stop_all()


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Deploy clippers')
    # BUG FIX: help text had a typo ("opertations") and advertised an 'r'
    # operation that was never implemented below; only d|u are handled.
    parser.add_argument('--op', help='operations supported: d|u')
    args = parser.parse_args()
    if args.op == 'd':
        deploy()
    elif args.op == 'u':
        undeploy()
    else:
        print("unsupported operation {}".format(args.op))
from clipper_admin import ClipperConnection, KubernetesContainerManager
from clipper_admin.deployers import python as python_deployer
import requests, json, numpy as np
import time

# Connect to an already-running Clipper cluster at a fixed external address.
clipper_conn = ClipperConnection(
    KubernetesContainerManager("https://35.197.66.133", useInternalIP=True))
clipper_conn.connect()
addr = clipper_conn.get_query_addr()
headers = {"Content-type": "application/json"}
# sample from 5 arbitrary valid feature vectors
possible_data = [
    "27,17,45,28,2,28,27,29,28,1,1,,23,68fd1e64,960c983b,9fbfbfd5,38c11726,25c83c98,7e0ccccf,fe06fd10,062b5529,a73ee510,ca53fc84,67360210,895d8bbb,4f8e2224,f862f261,b4cc2435,4c0041e5,e5ba7672,b4abdd09,21ddcdc9,5840adea,36a7ab86,,32c7478e,85e4d73f,010f6491,ee63dd9b",
    "1,1,19,7,1,3,1,7,7,1,1,,2,09ca0b81,8947f767,a87e61f7,c4ba2a67,25c83c98,7e0ccccf,ce6020cc,062b5529,a73ee510,b04d3cfe,70dcd184,899eb56b,aca22cf9,b28479f6,a473257f,88f592e4,d4bb7bd8,bd17c3da,1d04f4a4,a458ea53,82bdc0bb,,32c7478e,5bdcd9c4,010f6491,cca57dcc",
    "8,11,38,9,316,25,8,11,10,1,1,,9,05db9164,09e68b86,aa8c1539,85dd697c,25c83c98,7e0ccccf,bc252bd0,5b392875,a73ee510,ef5c0d3c,0bd0c3b3,d8c29807,c0e6befc,8ceecbc8,d2f03b75,c64d548f,e5ba7672,63cdbb21,cf99e5de,5840adea,5f957280,,55dd3565,1793a828,e8b83407,b7d9c3bc",
    ",4,13,20,17700,,0,20,1,,0,,20,68fd1e64,08d6d899,9143c832,f56b7dd5,0942e0a7,7e0ccccf,e88f1cec,0b153874,a73ee510,3b08e48b,8f410860,ae1bb660,b8eec0b1,b28479f6,bffbd637,bad5ee18,776ce399,bbf70d82,,,0429f84b,,be7c41b4,c0d61a5c,,",
    "16,18,5203,8,0,0,4,49,10,0,1,,0,05db9164,9f7e1d07,0253bbf5,d6420627,4cf72387,,0db090eb,0b153874,a73ee510,3b08e48b,10e6a64f,31adfaee,38b5339a,07d13a8f,3e25e5f5,1621c7f4,e5ba7672,6a58e423,21ddcdc9,5840adea,bcc7a461,,32c7478e,3214afd4,ea9a246c,e7ecb821"
]
# Append-mode log file for the generated predictions.
fh = open('predictions.log', 'a')
# Synthetic load generator: sleep an exponentially distributed interarrival
# time, then build a random-size batch of feature vectors sampled uniformly
# from `possible_data`.
# NOTE(review): the loop body appears truncated at the end of this chunk —
# `input` is built but never sent; the request-posting code presumably
# follows in the part of the file not visible here.
while (True):
    # poisson process with rate 0.1
    interarrival_time = np.random.exponential(scale=10)
    time.sleep(interarrival_time)
    num_requests = int(np.random.random() * 10) + 1  # uniform between 1 and 10
    # NOTE(review): `input` shadows the builtin of the same name.
    input = list()
    for _ in range(num_requests):
        input.append(possible_data[int(np.random.random() * 5)])
headers = {'Content-type': 'application/json'} r = requests.post(url, headers=headers, data=req_json) # print(json.loads(r.text)) str_r = json.loads(r.text)["output"].replace("[", "").replace("]", "").split() vals = [float(i) for i in str_r] new_lineage_objs = [ Lineage.add_node(input_lin[i], model_name, vals[i]) for i in range(len(input_lin)) ] return new_lineage_objs #setup Clipper connection clipper_conn = ClipperConnection( KubernetesContainerManager(useInternalIP=True)) clipper_conn.connect() batch_size = 2 # batches = np.array([]) # times = np.array([]) for i in range(20): # batch_size = np.random.randint(5, high=50) print("request " + str(i)) if batch_size > 1: input_list = [ Lineage(np.random.random_sample()) for i in range(batch_size) ] out_lin_1 = predict(clipper_conn.get_query_addr(), "lineage1", input_list,
X = ckd.iloc[:300, ] X = X.loc[:, X.columns != 'classification'] X.fillna(0, inplace=True) y = ckd.iloc[:300, -1] # train a classifier model = GradientBoostingClassifier(random_state=2019) model.fit(X, y) # First we need to import Clipper from clipper_admin import ClipperConnection, KubernetesContainerManager from clipper_admin.deployers.python import deploy_python_closure # Create a Clipper connection clipper_conn = ClipperConnection( KubernetesContainerManager(useInternalIP=True, kubernetes_proxy_addr="127.0.0.1:8080")) # Start a Clipper cluster or connect to a running one clipper_conn.start_clipper() # Register an app called 'kddtutorial'. This would create a REST endpoint clipper_conn.register_application(name="kddtutorial", input_type="doubles", default_output="-1.0", slo_micros=10000000) # Access the trained model via closure capture def predict(inputs): global model pred = model.predict(inputs)
#!/usr/bin/env python # Start long-lived Clipper cluster import os, sys from clipper_admin import ClipperConnection, KubernetesContainerManager from clipper_admin.deployers import python as python_deployer if len(sys.argv) != 3: print('Usage: python start-clipper.py clipper-cluster-IP redis-service-IP') print("For example, python start-clipper.py 35.197.66.133 10.59.247.82") sys.exit(1) clipper_ip = 'https://' + sys.argv[1] redis_ip = 'https://' + sys.argv[2] clipper_conn = ClipperConnection( KubernetesContainerManager(clipper_ip, useInternalIP=True)) try: clipper_conn.stop_all() clipper_conn.stop_all_model_containers() clipper_conn.start_clipper() clipper_conn.register_application(name="testbed", input_type="strings", default_output="-1.0", slo_micros=100000000) clipper_conn.get_all_apps() except Exception as e: print(e)