Exemplo n.º 1
0
def main():
    """Stand up a local Clipper cluster on Docker, deploy the builtin ``sum``
    as a "doubles" model behind the "sum-app" application, and issue one
    random test query against it.
    """
    # Container manager: local Docker. (A KubernetesContainerManager could be
    # substituted here to target a cluster instead.)
    container_manager = DockerContainerManager()
    clipper_conn = ClipperConnection(container_manager)
    clipper_conn.stop_all()
    clipper_conn.start_clipper()

    # Register the serving application (10 s latency objective).
    clipper_conn.register_application(name="sum-app",
                                      input_type="doubles",
                                      default_output="-1.0",
                                      slo_micros=10000000)

    # Deploy the prediction function as version 1 of "sum-model".
    python_deployer.deploy_python_closure(clipper_conn,
                                          name="sum-model",
                                          version=1,
                                          input_type="doubles",
                                          func=sum)

    # Route "sum-app" requests to "sum-model".
    clipper_conn.link_model_to_app(app_name="sum-app",
                                   model_name="sum-model")

    # Smoke-test with a single random query.
    request_headers = {"Content-type": "application/json"}
    payload = json.dumps({"input": list(np.random.random(10))})
    response = requests.post("http://localhost:1337/sum-app/predict",
                             headers=request_headers,
                             data=payload).json()
    print(response)
Exemplo n.º 2
0
    def test_deployed_python_closure_queried_successfully(self):
        """Deploy a closure that echoes the input length and poll the app
        until a non-default prediction comes back."""
        model_version = 1

        def predict_func(inputs):
            # One prediction per input: the input's length, as a string.
            return [str(len(x)) for x in inputs]

        input_type = "doubles"
        deploy_python_closure(self.clipper_conn, self.model_name_1,
                              model_version, input_type, predict_func)

        self.clipper_conn.link_model_to_app(self.app_name_1, self.model_name_1)
        # Give the model container time to start up and register.
        time.sleep(60)

        received_non_default_prediction = False
        addr = self.clipper_conn.get_query_addr()
        url = "http://{addr}/{app}/predict".format(addr=addr,
                                                   app=self.app_name_1)
        test_input = [101.1, 99.5, 107.2]
        req_json = json.dumps({'input': test_input})
        headers = {'Content-type': 'application/json'}
        # Retry up to 40 times: while the container is still warming up,
        # Clipper serves the app's default output instead of a prediction.
        for i in range(0, 40):
            response = requests.post(url, headers=headers, data=req_json)
            parsed_response = response.json()
            print(parsed_response)
            output = parsed_response["output"]
            if output == self.default_output:
                time.sleep(20)
            else:
                received_non_default_prediction = True
                # The closure returns len(input) for each query.
                self.assertEqual(int(output), len(test_input))
                break

        self.assertTrue(received_non_default_prediction)
def auth_deploy_python_model(clipper_conn,
                             model_name,
                             wave_obj,
                             recipient_entity,
                             ciphertext,
                             version=1,
                             input_type="doubles"):
    '''Decrypt a cloudpickle-serialized model with WAVE and deploy it to Clipper.

    Parameters
    ----------
    clipper_conn : :py:meth:`clipper_admin.ClipperConnection`
        A ``ClipperConnection`` object connected to a running Clipper cluster.
    model_name : str
        The name to assign to the deployed model.
    wave_obj : wave object
        Client used to perform the WAVE ``DecryptMessage`` call.
    recipient_entity :
        WAVE entity whose secret key (``SecretDER``) authorizes decryption.
    ciphertext :
        The encrypted, cloudpickle-serialized model to be decoded.
    version : str or int, optional
        The version to assign this model. Versions must be unique on a
        per-model basis, but may be re-used across different models.
    input_type : str, optional
        The input_type to be associated with the deployed model. One of
        "integers", "floats", "doubles", "bytes", or "strings".

    Raises
    ------
    Exception
        If the WAVE decryption call reports a non-zero error code
        (i.e. the caller is not authorized to decrypt the model).
    '''
    decrypt_response = wave_obj.DecryptMessage(
        wv.DecryptMessageParams(perspective=wv.Perspective(
            entitySecret=wv.EntitySecret(DER=recipient_entity.SecretDER)),
                                ciphertext=ciphertext,
                                resyncFirst=True))
    if decrypt_response.error.code != 0:
        raise Exception("Incorrect authentication")

    # SECURITY NOTE: cloudpickle.loads executes arbitrary code from the
    # payload; this is only acceptable because decryption above proves the
    # sender was authorized. Do not relax the error check before this line.
    model = cloudpickle.loads(decrypt_response.content)

    # temporarily put the predict method here...
    def predict(inputs):
        # model.predict returns a list of predictions
        preds = model.predict(inputs)
        return [str(p) for p in preds]

    py_deployer.deploy_python_closure(
        clipper_conn,
        name=model_name,
        version=version,
        input_type=input_type,
        func=predict,
        pkgs_to_install=["numpy", "scipy", "pandas", "sklearn"])
def main(version, label):
    """Deploy the ``feature_sum`` closure as "sum-model" on a Kubernetes
    Clipper cluster, tagging the model with *label* and pushing its image
    to a local Docker registry.
    """
    from clipper_admin import ClipperConnection, KubernetesContainerManager
    from clipper_admin.deployers import python as python_deployer

    # Connect to the already-running cluster (internal IPs, no proxy).
    clipper_conn = ClipperConnection(
        KubernetesContainerManager(useInternalIP=True))
    clipper_conn.connect()

    docker_registry = 'localhost:5000'
    python_deployer.deploy_python_closure(clipper_conn,
                                          name="sum-model",
                                          version=version,
                                          input_type="doubles",
                                          func=feature_sum,
                                          labels=[label],
                                          registry=docker_registry)
Exemplo n.º 5
0
    def test_test_predict_function(self):
        """Verify ClipperConnection.test_predict_function reproduces the live
        model's output for both single-input and batch-input queries."""
        def predict_func(xs):
            return [sum(x) for x in xs]

        self.clipper_conn.register_application(name="hello-world",
                                               input_type="doubles",
                                               default_output="-1.0",
                                               slo_micros=100000)

        deploy_python_closure(self.clipper_conn,
                              name="sum-model",
                              version=1,
                              input_type="doubles",
                              func=predict_func)
        self.clipper_conn.link_model_to_app(app_name="hello-world",
                                            model_name="sum-model")
        # Give the model container time to start and register.
        time.sleep(60)

        addr = self.clipper_conn.get_query_addr()

        # Added a trailing slash on predict url for test
        url = "http://{addr}/hello-world/predict/".format(addr=addr,
                                                          app='hello-world')

        headers = {"Content-type": "application/json"}
        test_input = [1.1, 2.2, 3.3]
        pred = requests.post(url,
                             headers=headers,
                             data=json.dumps({"input": test_input})).json()
        # Run the same closure locally through the debugging helper and
        # compare against the served prediction.
        test_predict_result = self.clipper_conn.test_predict_function(
            query={"input": test_input},
            func=predict_func,
            input_type="doubles")
        logger.info("test pred output {}".format(pred))
        self.assertEqual([pred['output']],
                         test_predict_result)  # tests single input

        test_batch_input = [[1.1, 2.2, 3.3], [4.4, 5.5, 6.6]]
        batch_pred = requests.post(
            url,
            headers=headers,
            data=json.dumps({"input_batch": test_batch_input})).json()
        test_batch_predict_result = self.clipper_conn.test_predict_function(
            query={"input_batch": test_batch_input},
            func=predict_func,
            input_type="doubles")
        batch_predictions = batch_pred['batch_predictions']
        batch_pred_outputs = [batch['output'] for batch in batch_predictions]
        self.assertEqual(batch_pred_outputs,
                         test_batch_predict_result)  # tests batch input
Exemplo n.º 6
0
    def test_python_closure_deploys_successfully(self):
        """Deploy a trivial closure and verify both that Clipper records the
        model and that a closure container is actually running in Docker."""
        model_name = "m2"
        model_version = 1

        def predict_func(inputs):
            return ["0" for x in inputs]

        input_type = "doubles"
        deploy_python_closure(self.clipper_conn, model_name, model_version,
                              input_type, predict_func)
        model_info = self.clipper_conn.get_model_info(model_name,
                                                      model_version)
        self.assertIsNotNone(model_info)

        docker_client = get_docker_client()
        # (major, minor) tuple of the running interpreter, which determines
        # which closure-container base image the deployer used.
        py_version = (sys.version_info.major, sys.version_info.minor)
        if py_version < (3, 0):
            image = "clipper/python-closure-container"
        elif py_version == (3, 5):
            image = "clipper/python35-closure-container"
        elif py_version == (3, 6):
            image = "clipper/python36-closure-container"
        else:
            msg = (
                "Python closure deployer only supports Python 2.7, 3.5, and 3.6. "
                "Detected {major}.{minor}").format(
                    major=sys.version_info.major, minor=sys.version_info.minor)
            logger.error(msg)
            # BUG FIX: the original only logged here and then raised a
            # NameError on the unbound ``containers`` below; fail explicitly
            # with the real reason instead.
            self.fail(msg)

        containers = docker_client.containers.list(
            filters={
                "ancestor": "{image}:{tag}".format(image=image,
                                                   tag=clipper_version)
            })

        self.assertGreaterEqual(len(containers), 1)
def deploy_and_test_model(clipper_conn,
                          model,
                          version,
                          predict_fn,
                          link_model=False):
    """Deploy *predict_fn* under the model name *model* (integers input,
    batch size 1, xgboost installed in the container), optionally link it
    to the application, and run ``test_model`` against it.

    NOTE(review): ``app_name`` is not defined in this function — it is
    presumably a module-level global; confirm against the full file.
    """
    # BUG FIX: the original body referenced an undefined name
    # ``model_name``; the parameter is called ``model``.
    deploy_python_closure(clipper_conn,
                          model,
                          version,
                          "integers",
                          predict_fn,
                          batch_size=1,
                          pkgs_to_install=['xgboost'])
    # Give the model container a moment to come up before querying.
    time.sleep(5)

    if link_model:
        clipper_conn.link_model_to_app(app_name, model)
        time.sleep(5)

    test_model(clipper_conn, app_name, version)
Exemplo n.º 8
0
def deployModelToClipper():
    """Deploy model to clipper and replace its entry.

    Best-effort: start (or attach to) a local cluster, then register the
    app, deploy the model, and link them, printing — not raising — any
    step that fails (e.g. because it was already done on a previous run).
    """
    global app_name, model_name, model_version

    print('Deploying model to clipper, model_name={}, model_version={}'.format(
        model_name, model_version))

    # Setup clipper and deploy model
    clipper_conn = ClipperConnection(DockerContainerManager(redis_port=6380))
    try:
        clipper_conn.start_clipper()
    except Exception:
        # BUG FIX: was a bare ``except:``, which also swallows SystemExit and
        # KeyboardInterrupt. start_clipper() raises when a cluster is already
        # running, so fall back to connecting to it.
        clipper_conn.connect()
    try:
        # input_type must be bytes as inputs will be serialized into bytes with pickle
        clipper_conn.register_application(name=app_name,
                                          input_type="bytes",
                                          default_output="-1.0",
                                          slo_micros=1000000)
    except Exception as e:
        # The app may already be registered from a previous run.
        print(e)
    try:
        deploy_python_closure(clipper_conn,
                              name=model_name,
                              version=model_version,
                              input_type="bytes",
                              batch_size=1,
                              func=predict,
                              base_image='hysia-clipper-base-container-gpu')
    except Exception as e:
        print(e)
    try:
        clipper_conn.link_model_to_app(app_name=app_name,
                                       model_name=model_name)
    except Exception as e:
        print(e)

    replaceDefaultEntry()
    print('{} deployed to clipper!'.format(model_name))
Exemplo n.º 9
0
    def test_fixed_batch_size_model_processes_specified_query_batch_size_when_saturated(
            self):
        """Saturate a model deployed with a fixed batch size and check that
        most queries were processed in batches of exactly that size."""
        model_version = 1

        def predict_func(inputs):
            # Slow the model down so queries pile up and Clipper batches
            # them; echo the observed batch size as each query's output.
            time.sleep(.5)
            batch_size = len(inputs)
            return [str(batch_size) for _ in inputs]

        fixed_batch_size = 9
        total_num_queries = fixed_batch_size * 50
        deploy_python_closure(self.clipper_conn,
                              self.model_name_4,
                              model_version,
                              self.input_type,
                              predict_func,
                              batch_size=fixed_batch_size)
        self.clipper_conn.link_model_to_app(self.app_name_4, self.model_name_4)
        # Give the model container time to start and register.
        time.sleep(60)

        addr = self.clipper_conn.get_query_addr()
        url = "http://{addr}/{app}/predict".format(addr=addr,
                                                   app=self.app_name_4)
        # Many distinct 5-double inputs, perturbed so no two are identical.
        test_input = [[float(x) + (j * .001) for x in range(5)]
                      for j in range(total_num_queries)]
        req_json = json.dumps({'input_batch': test_input})
        headers = {'Content-type': 'application/json'}
        response = requests.post(url, headers=headers, data=req_json)
        parsed_response = response.json()
        num_max_batch_queries = 0
        for prediction in parsed_response["batch_predictions"]:
            batch_size = prediction["output"]
            if batch_size != self.default_output and int(
                    batch_size) == fixed_batch_size:
                num_max_batch_queries += 1

        # Allow slack: the first/last batches may be partial while the queue
        # fills and drains; at least 70% must hit the fixed batch size.
        self.assertGreaterEqual(num_max_batch_queries,
                                int(total_num_queries * .7))
Exemplo n.º 10
0
def setup_clipper():
    """Connect to a running local Clipper cluster, deploy ``join_predict``
    as "predict-model", register "predict-app" (bytes input, 10 s SLO),
    link them, and print the app's predict URL."""
    app_name = 'predict-app'
    model_name = "predict-model"
    clipper_conn = ClipperConnection(DockerContainerManager())
    clipper_conn.connect()

    deploy_python_closure(clipper_conn,
                          name=model_name,  # was a duplicated literal
                          version='1',
                          input_type="bytes",
                          func=join_predict)

    clipper_conn.register_application(name=app_name,
                                      input_type="bytes",
                                      default_output="-1.0",
                                      slo_micros=10000000)  # 10s

    clipper_conn.link_model_to_app(app_name=app_name, model_name=model_name)

    # BUG FIX: the original printed the URL with addr="" (an empty host);
    # use the cluster's actual query address instead.
    addr = clipper_conn.get_query_addr()
    print(
        "url: ", "http://{addr}/{app_name}/predict".format(addr=addr,
                                                           app_name=app_name))
Exemplo n.º 11
0
    print(class_n)
    print(ret)
    return pred


# In[ ]:

from clipper_admin.deployers import python as python_deployer

# In[ ]:

# Deploy the "feature_sum" function as a model. Notice that the application and model
# must have the same input type.
# NOTE(review): ``clipper_conn``, ``feature_sum`` and ``iris_predict`` are
# defined in earlier cells of the notebook this script came from.
python_deployer.deploy_python_closure(clipper_conn,
                                      name="sum-model",
                                      version=1,
                                      input_type="doubles",
                                      func=feature_sum)

# In[ ]:

# Deploy the "iris_predict" function as a model. Notice that the application and model
# must have the same input type.
python_deployer.deploy_python_closure(
    clipper_conn,
    name="iris-model",
    version=1,
    input_type="doubles",
    func=iris_predict,
    pkgs_to_install=['numpy', 'scipy', 'scikit-learn'])
Exemplo n.º 12
0
    print("inputs {}".format(X))
    try:
        result = clf.predict(X)
        print("result is {}".format(result))
        ret = [str(i) for i in result]
        print("return is {}".format(ret))
        return ret
    except Exception as e:
        print(e)
        return [str(e)]


# NOTE(review): K8S_ADDR, K8S_NS, APP_NAME, PREDICT_NAME, VERSION, REGISTRY,
# predict_wrapper, predict and test_examples are defined in earlier cells of
# the notebook this script came from.
manager = KubernetesContainerManager(kubernetes_proxy_addr=K8S_ADDR,
                                     namespace=K8S_NS)
clipper_conn = ClipperConnection(manager)
clipper_conn.connect()

# clipper_conn.delete_application(APP_NAME)
# clipper_conn.register_application(
#    name = APP_NAME, input_type = 'doubles', default_output = '0', slo_micros = 100000000)

# Deploy the scoring wrapper, pushing its image to the configured registry.
python_deployer.deploy_python_closure(clipper_conn,
                                      name=PREDICT_NAME,
                                      version=VERSION,
                                      input_type="doubles",
                                      func=predict_wrapper,
                                      registry=REGISTRY,
                                      pkgs_to_install=['sklearn'])

# clipper_conn.link_model_to_app(app_name=APP_NAME, model_name=PREDICT_NAME)
# make predictions
predictions = predict(test_examples.values)
print("Predict instances in test set using custom defined scoring function...")
predictions

# In[7]:

from clipper_admin.deployers import python as python_deployer
# We specify which packages to install in the pkgs_to_install arg.
# For example, if we wanted to install xgboost and psycopg2, we would use
# pkgs_to_install = ['xgboost', 'psycopg2']
print("Deploy predict function closure using Clipper...")
python_deployer.deploy_python_closure(clipper_conn,
                                      name='xgboost-model',
                                      version=1,
                                      input_type="doubles",
                                      func=predict,
                                      pkgs_to_install=['xgboost'])

# Give the model container a moment to come up.
time.sleep(5)

# In[8]:

print("Link Clipper connection to model application...")
clipper_conn.link_model_to_app('xgboost-airlines', 'xgboost-model')

# In[22]:

import requests, json
# Get Address
addr = clipper_conn.get_query_addr()
Exemplo n.º 14
0
    return l


# NOTE(review): ``lin_model_1`` / ``lin_model_2`` are defined earlier in the
# original script this fragment came from.
clipper_conn = ClipperConnection(
    KubernetesContainerManager(useInternalIP=True))
# clipper_conn = ClipperConnection(DockerContainerManager())
clipper_conn.start_clipper()

#Deploy lin_model_1
clipper_conn.register_application(name="linear1",
                                  input_type="doubles",
                                  default_output="-1.0",
                                  slo_micros=100000)
deploy_python_closure(clipper_conn,
                      name="lin-model-1",
                      version=1,
                      input_type="doubles",
                      func=lin_model_1,
                      registry="hsubbaraj")
clipper_conn.link_model_to_app(app_name="linear1", model_name="lin-model-1")
print("deployed model 1")

#Deploy lin_model_2
clipper_conn.register_application(name="linear2",
                                  input_type="doubles",
                                  default_output="-1.0",
                                  slo_micros=100000)
deploy_python_closure(clipper_conn,
                      name="lin-model-2",
                      version=1,
                      input_type="doubles",
                      func=lin_model_2,
from clipper_admin import ClipperConnection, KubernetesContainerManager
# Attach to a running Kubernetes-backed Clipper cluster using internal IPs.
clipper_conn = ClipperConnection(
    KubernetesContainerManager(useInternalIP=True))
clipper_conn.connect()

# Register a "doubles" application with a 100 ms latency objective.
clipper_conn.register_application(name="hello-world",
                                  input_type="doubles",
                                  default_output="-1.0",
                                  slo_micros=100000)
clipper_conn.get_all_apps()


def feature_sum(xs):
    """Return, for each input vector in *xs*, the sum of its elements as a string."""
    totals = []
    for vector in xs:
        totals.append(str(sum(vector)))
    return totals


from clipper_admin.deployers import python as python_deployer

# Deploy feature_sum as "sum-model", pushing its image to a local registry.
registry = 'localhost:5000'
python_deployer.deploy_python_closure(clipper_conn,
                                      name="sum-model",
                                      version=1,
                                      input_type="doubles",
                                      func=feature_sum,
                                      registry=registry)
Exemplo n.º 16
0
        Accepts list of JSON string as argument
        """
        ret = []
        for param in params:
            args = json.loads(param)
            zone = args['zone']
            date = str(args['date'])
            schedule = args['schedule']
            temps, actions = execute_schedule(date, schedule, models[zone], 65)
            ret.append(temps)
        return ret

    from clipper_admin.deployers import python as python_deployer
    python_deployer.deploy_python_closure(clipper_conn,
                                          name='thermal-model-ciee',
                                          version=1,
                                          input_type='strings',
                                          func=execute_thermal_model,
                                          base_image="xbospy")
    clipper_conn.link_model_to_app(app_name="ciee_thermal",
                                   model_name="thermal-model-ciee")
except:
    clipper_conn.connect()

import time
import requests
time.sleep(10)
inp = json.dumps({
    'zone': 'http://buildsys.org/ontologies/ciee#CentralZone',
    'date': '2018-02-06 00:00:00 UTC',
    'schedule': normal_schedule
})
Exemplo n.º 17
0
# Start a fresh local Clipper cluster and remember its query address.
clipper_conn = ClipperConnection(DockerContainerManager())
clipper_conn.start_clipper()
clipper_addr = clipper_conn.get_query_addr()

def preprocess(inputs):
    """Round the numeric fields of the first CSV record in *inputs*.

    Expects a single comma-separated string like "5.1,3.5,1.4,0.2,setosa":
    every field except the last is rounded to one decimal place, the last
    (the label) is kept, and the record is returned as a one-element list
    of the Python-repr joined fields (without the surrounding brackets).
    """
    fields = inputs[0].split(",")
    rounded_values = [round(float(value), 1) for value in fields[:-1]]
    record = rounded_values + [fields[-1]]
    return [str(record)[1:-1]]

python_deployer.deploy_python_closure(
    clipper_conn,
    name="process-iris",  # The name of the model in Clipper
    version=1,  # A unique identifier to assign to this model.
    # BUG FIX: was "string" — Clipper's valid input types are "integers",
    # "floats", "doubles", "bytes", "strings", and the model's type must
    # match the "strings" type of the application it is linked to below.
    input_type="strings",
    func=preprocess # The model function to deploy
)

clipper_conn.register_application(
    name="process-app",
    input_type="strings",
    default_output="-1",
    slo_micros=9000000) #will return default value in 9 seconds

# Route "process-app" queries to the preprocessing model.
clipper_conn.link_model_to_app(app_name="process-app", model_name="process-iris")


Exemplo n.º 18
0
# Access the trained model via closure capture
def predict(inputs):
    """Score *inputs* with the globally captured model and return the
    predictions as strings (Clipper outputs must be strings)."""
    global model
    outputs = model.predict(inputs)
    return [str(output) for output in outputs]


# Point to the gradient boosting model
# NOTE(review): ``model = model`` is a no-op self-assignment kept from the
# original notebook; it only works if ``model`` was already defined above.
model = model

# Deploy the 'predict' function as a model
deploy_python_closure(
    clipper_conn,
    name="gb-model",
    version=1,
    input_type="doubles",
    func=predict,
    pkgs_to_install=['scikit-learn', 'pandas', 'numpy', 'scipy'],
    registry="gkip")

# Routes requests for the application 'kddtutorial' to the model 'gb-model'
clipper_conn.link_model_to_app(app_name="kddtutorial", model_name="gb-model")

# Smoke-test the app with one row of the feature frame.
inputs = X.loc[200, X.columns != 'classification']  # use random data point
headers = {"Content-type": "application/json"}
addr = clipper_conn.get_query_addr()
response = requests.post("http://%s/%s/predict" % (addr, 'kddtutorial'),
                         headers=headers,
                         data=json.dumps({"input": list(inputs)})).json()
print(response)
Exemplo n.º 19
0
    return pred


# Start a local cluster, or attach to one that is already running.
clipper_conn = ClipperConnection(DockerContainerManager(redis_port=6380))
try:
    clipper_conn.start_clipper()
except Exception:
    # BUG FIX: was a bare ``except:``, which also swallows SystemExit and
    # KeyboardInterrupt; start_clipper() raises when a cluster exists.
    clipper_conn.connect()

clipper_conn.register_application(name=APP_NAME,
                                  input_type="integers",
                                  default_output="-1.0",
                                  slo_micros=100000)
# Check all apps
print(clipper_conn.get_all_apps())

deploy_python_closure(clipper_conn,
                      name=MODEL_NAME,
                      version="1",
                      input_type="integers",
                      func=predict)
clipper_conn.link_model_to_app(app_name=APP_NAME, model_name=MODEL_NAME)

import json
inputs = cv2.imread('test1.jpg')
# BUG FIX: the header value was misspelled "aplication/json".
headers = {"Content-type": "application/json"}
# NOTE(review): ``list(inputs)`` on a cv2 image yields numpy row arrays,
# which json.dumps cannot serialize as-is — this likely needs something
# like ``inputs.flatten().tolist()``. TODO confirm the expected encoding.
result = requests.post("http://localhost:1337/" + APP_NAME + "/predict",
                       headers=headers,
                       data=json.dumps({"input": list(inputs)})).json()
print(result)
Exemplo n.º 20
0
from clipper_admin import ClipperConnection, DockerContainerManager
# Attach to a Clipper cluster already running on local Docker.
clipper_conn = ClipperConnection(DockerContainerManager())
clipper_conn.connect()

import sys
# Put the working directory on the path so predict_example is importable.
sys.path.append('./')
print(sys.path)

from predict_example import python

from clipper_admin.deployers import python as python_deployer

# Deploy the example module's predict function as "sum-model".
python_deployer.deploy_python_closure(clipper_conn,
                                      name="sum-model",
                                      version=1,
                                      input_type="doubles",
                                      func=python.predict)
Exemplo n.º 21
0
        response = apply_forecast_impl(parameters[0].decode("utf-8").split(','))
        code = '202' 
    except Exception as e:
        response = str(e)
        code = '500'
    return [str(code+', '+response) for _ in parameters]

if __name__ == '__main__':
    # setup logging format
    # NOTE: ``format`` shadows the builtin of the same name; harmless here.
    format = "%(asctime)-15s %(message)s"
    logging.basicConfig(
        filename='./timeseries/log.log', level=logging.DEBUG, format=format)
    # set up logging to console
    console = logging.StreamHandler(sys.stdout)
    console.setLevel(logging.ERROR)
    logging.getLogger().addHandler(console)

    # Install the Ctrl+C handler, then start the cluster and the model.
    signal.signal(signal.SIGINT, signal_handler)
    conn = ClipperConnection(DockerContainerManager())
    conn.start_clipper()
    try:
        conn.register_application(name="forecast",input_type="strings",default_output="500, Error executing call.",slo_micros=100000000)
        python_deployer.deploy_python_closure(conn, name="do-forecast", version=1, input_type="strings", func=do_forecast, base_image='wamsiv/timeseries:latest')
        conn.link_model_to_app(app_name="forecast", model_name='do-forecast')
        # Keep the query frontend container alive across crashes.
        print(subprocess.getoutput(["docker update --restart=on-failure $(docker ps -a | grep 'clipper/query_frontend:0.3.0' | awk '{ print $1 }')"]))
        # Block until the operator stops the server.
        input("Server started. Press ctrl+c to stop server.\n")
    except Exception as e:
        logging.error("Encountered {}. Stopping server...".format(e))
        conn.stop_all()
    conn.stop_all()
            requests.post("http://" + clipper_url + ":1337/" + app_name +
                          "/predict",
                          headers=headers,
                          data=data_input).json()['output'])
    return results


#################################################
#################################################
#################################################

from clipper_admin.deployers import python as python_deployer

# NOTE(review): ``clipper_conn`` and ``query_agent_function`` are defined
# earlier in the original script this fragment came from.
python_deployer.deploy_python_closure(clipper_conn,
                                      name="query-agent-model",
                                      input_type="doubles",
                                      func=query_agent_function,
                                      version=9)

clipper_conn.link_model_to_app(app_name="query_agent",
                               model_name="query-agent-model")

# Debugging
# Pin the app to an older model version while debugging.
clipper_conn.set_model_version(name="query-agent-model", version="3")

import requests, json, time, sys, numpy as np
headers = {"Content-type": "application/json"}
clipper_url = "192.168.56.101"  # default: "localhost"
app_name = "query_agent"
# data_input = json.dumps({"input": list(np.random.random(input_size))})
data_input = json.dumps(
Exemplo n.º 23
0
def predict(delay):
    """Run the global Prophet model ``model_r`` 30 days ahead (daily
    frequency) and return the whole forecast frame serialized as JSON.

    ``delay`` is accepted for interface compatibility but is not used by
    the current implementation.
    """
    future_frame = model_r.make_future_dataframe(periods=30, freq='D')
    forecast_frame = model_r.predict(future_frame)
    return forecast_frame.to_json()


from clipper_admin.deployers import python as python_deployer

# Deploy the forecasting closure; fbprophet is pinned to 0.4 inside the
# model container via pkgs_to_install.
python_deployer.deploy_python_closure(
    clipper_conn,
    name="p1model",
    version=1,
    input_type="strings",
    func=predict,
    pkgs_to_install=['pandas', 'fbprophet==0.4'])

# Route "p1app" queries to the newly deployed model.
clipper_conn.link_model_to_app(app_name="p1app", model_name="p1model")

#import requests, json, numpy as np
#headers = {"Content-type": "application/json"}
#datas = json.dumps({"input": list(np.random.random(10))})

#requests.post("http://10.65.47.80:1337/p1app/predict", headers=headers, data=datas).json()
Exemplo n.º 24
0
    def test_query_specific_model_version(self):
        """Deploy two versions of one model and verify that a request can pin
        a version, that the newest version is the default, and that a
        non-existent version is rejected with a descriptive error."""
        model_name = "testmodel"
        app_name = "testapp"

        def predict_func1(xs):
            # Version v1 always answers "1".
            return ["1" for _ in xs]

        def predict_func2(xs):
            # Version v2 always answers "2".
            return ["2" for _ in xs]

        self.clipper_conn.register_application(name=app_name,
                                               input_type="doubles",
                                               default_output="DEFAULT",
                                               slo_micros=100000)

        deploy_python_closure(self.clipper_conn,
                              name=model_name,
                              version="v1",
                              input_type="doubles",
                              func=predict_func1)

        self.clipper_conn.link_model_to_app(app_name, model_name)

        time.sleep(30)

        deploy_python_closure(self.clipper_conn,
                              name=model_name,
                              version="v2",
                              input_type="doubles",
                              func=predict_func2)

        # Wait for both model containers to register with Clipper.
        time.sleep(60)

        addr = self.clipper_conn.get_query_addr()
        url = "http://{addr}/{app}/predict".format(addr=addr, app=app_name)

        headers = {"Content-type": "application/json"}
        test_input = [1.0, 2.0, 3.0]

        # Explicitly pin version v1 in the request body.
        pred1_raw = requests.post(url,
                                  headers=headers,
                                  data=json.dumps({
                                      "input": test_input,
                                      "version": "v1"
                                  }))
        try:
            pred1 = pred1_raw.json()
            self.assertFalse(pred1["default"])
            self.assertEqual(pred1['output'], 1)
        except ValueError:
            # Response body was not JSON: log it and fail the test.
            logger.error(pred1_raw.text)
            self.assertTrue(False)

        # No version given: Clipper should route to the newest version (v2).
        pred2_raw = requests.post(url,
                                  headers=headers,
                                  data=json.dumps({"input": test_input}))
        try:
            pred2 = pred2_raw.json()

            self.assertFalse(pred2["default"])
            self.assertEqual(pred2['output'], 2)
        except ValueError:
            logger.error(pred2_raw.text)
            self.assertTrue(False)

        # Query a version that doesn't exist:
        bad_version_name = 'skjfhkdjshfjksdhkjf'
        pred3 = requests.post(url,
                              headers=headers,
                              data=json.dumps({
                                  "input": test_input,
                                  "version": bad_version_name
                              }))
        logger.info(pred3.text)
        self.assertFalse(pred3.status_code == requests.codes.ok)
        self.assertEqual(
            pred3.json()['cause'],
            "Requested version: {version_name} does not exist for model: {model_name}"
            .format(version_name=bad_version_name, model_name=model_name))
Exemplo n.º 25
0
heatmap = 'data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/4QD4RXhpZgAASUkqAAgAAAAMAAABAwABAAAAZgUAAAEBAwABAAAAqwIAAAIBAwAEAAAAngAAAAMBAwABAAAABQAAABIBAwABAAAAAQAAABUBAwABAAAABAAAABoBBQABAAAApgAAABsBBQABAAAArgAAABwBAwABAAAAAQAAACgBAwABAAAAAgAAADEBAgAQAAAAtgAAAGmHBAABAAAAxgAAAAAAAAAIAAgACAAIAIBPEgAQJwAAgE8SABAnAABTaG90d2VsbCAwLjMwLjIAAwABoAMAAQAAAP//AAACoAkAAQAAAIgAAAADoAkAAQAAAIgAAAAAAAAA/+EJ9Gh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8APD94cGFja2V0IGJlZ2luPSLvu78iIGlkPSJXNU0wTXBDZWhpSHpyZVN6TlRjemtjOWQiPz4gPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iWE1QIENvcmUgNC40LjAtRXhpdjIiPiA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPiA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIiB4bWxuczpleGlmPSJodHRwOi8vbnMuYWRvYmUuY29tL2V4aWYvMS4wLyIgeG1sbnM6dGlmZj0iaHR0cDovL25zLmFkb2JlLmNvbS90aWZmLzEuMC8iIGV4aWY6UGl4ZWxYRGltZW5zaW9uPSIxMzYiIGV4aWY6UGl4ZWxZRGltZW5zaW9uPSIxMzYiIHRpZmY6SW1hZ2VXaWR0aD0iMTM2IiB0aWZmOkltYWdlSGVpZ2h0PSIxMzYiIHRpZmY6T3JpZW50YXRpb249IjEiLz4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC
AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA8P3hwYWNrZXQgZW5kPSJ3Ij8+/+0APFBob3Rvc2hvcCAzLjAAOEJJTQQEAAAAAAAfHAIAAAIAABwCQQAIU2hvdHdlbGwcAkYABjAuMzAuMgD/2wBDAAMCAgMCAgMDAwMEAwMEBQgFBQQEBQoHBwYIDAoMDAsKCwsNDhIQDQ4RDgsLEBYQERMUFRUVDA8XGBYUGBIUFRT/2wBDAQMEBAUEBQkFBQkUDQsNFBQUFB
QUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBT/wAARCACIAIgDASIAAhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUFBAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAAAAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSExBhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVYnLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9oADAMBAAIRAxEAPwD7F/aM+PviP4U+LLXTNGFjLHNZJOIriBnkLl3XghgMfKOK87tv2uPG/knz10oSIMyf6KykHsMb60/2vLUt4/s3SMNu02NJGIJ2jzJMYr5/l0OJrhphNtkhGcEn977YoA98h/aj8d3Cb410tyi5eNbNs+2Pnr55/aK/4KB/HX4SPDd6bH4cNjIQBDdaVI0g+pEo/lW1ZaikYiLAwBT88G872Prz0FcB8cvC6eONKu5LhEfbAyIANzNkdjnqKAPMbb/gsL8fZ51j8nwgCf72lyAf+jqZJ/wWL+PYdgsHhIDPfS5P/j1fE+qWU2iatdWb7klgkaM4PPpip/DugXPiTV7axt42Yyuqs4UkICeSfSgD9YfhH+3/APF/xV4BtfEGu23h1VuNzLJb2TooAPcGQ/nWppX7d3xV1q4GoRJoEfh4MVFylg7lyD0B8yvmPSLuDw74Q0/R7i7gXTYY/Iit4QfOZQOWYZ71458S/jitlp+lWXhqd7GCyldDZFeCufv0AfrHpX7Uvi+78KTatJb6fsTkTfZ2WMjvxuzTdD/aw8QeMov+JRNpcDqcFrmAlT6jh6+Yfh58bdC1z4OaJPJfRrFHZvG9qsg3zzY7jtXxSPi9rfhfWLlrS+ktU+1uUglkyMk8Dcvp6GgD7O/aM/4KB/tI/AvxdNAbHwxdeH5W3Wt6NLkYbf7rMJcA15Rqf/BYz44PclrG18LRQED93LpkjMp7jPnc1z/hL9r+Lxj5GheK7OC/t4/3crXqqY7hc8gqR29a8v8A2pPDPhcXll4h8I2sNlp0/wC6kht8eXu6/Lj8aAPdNL/4K9/HW+8155fBdpHEu4+Zpcu5vZR53JrqNE/4KsfGbxBeSw2E/heaPaCG/sWXdH6lx53T3r867W0e6lCJjeR8oY43H2Ne8/st+DpLnxRetckJLcW5t7eDJ/esTySR2FAH6L+Ff2zvjDrkFpJO+gxvIAXj/stxkf3lPm9K6GH9rD4vz6y8fl6BHYBTsZrJ97H1/wBZ0rzbw5ocOg6clndkM0cYwOfMjx1GakF8uJpZFzGoxDCr/MR9KAPWYv2s/iPJaN5cejzSxcO/2RgrfT56K8jKv9gBDlIQchyRhW/ukGigD3v9quec/E+xSNpIY49LjkeUfdx5kmRj1rxDUYLY2r3lmHmlbqHkKnHr7V71+1HPK/xHsrfyQ1v/AGbGzyMcKo8yTr3rxCfTo/txMjIIiR5aq4MbfX2oAbplg2qaJMZxFNcMu1Qw/eAf73eqmtRhNHjtrVFnurZcOioGGMdPrXWaYY5r5k+zxiboYUIcRr6qRWNqunf2bZ
69KlkYHaF3R0csWwOu09KAPye+Kha4+JPiI+SYna8f91jlfapvCPxEvPCkZhtIEjn3DY6DGD33DvWfqVvqGqeMdSLTr9vFzJI0s8gU5DdcnvVbVY73R9XW4uHgnupR5hZWV1JP04oA3tY1/X5/GkpF5G2pXDKhmibCsD0HtV/UdCj1TxVY2otzHO0JW6wpdUbH3s+9c1oEM7TebBBb3EszYSad/wDUsOScV38F9qN1K0004Nm+ImV4vKO8d1I6igDFWLxD4bsLfRJJBZ6a8zSfa4VBYf8AAqx7rTzotzHHdWUtzNMT5YuG3IN3ST5e/tXYeJfENxHNDb2diNSlZNnl7d6//rrn4/FOsaLfefb3cmlQf6uaK6QMN3psxnH0oAzNf8Fal4VhZtTtRA5YHPnDMqN02r1qvY6fNfXwsrS4nETDcttNncFxzx0qz4l8RaTq19FcJbXE024GaaWTAY/7I7Cuq8ceE59I0ex8TaNIt/Z3sKm4faWe1JHCt6fhQB51MsUt60EELCJW2iItyxHU5r7L/Zh8ETGXSLqyibdbr5il2MTr6j3HvXxzptxa21u0k0cd05cEwMSrDHdWH8q/TX9mSztrrwtBf2KkxSWqsLqYYTGOUJ9aAO/vnCTG7W2/fkFZlGTketUzpwezt45kMEjkvDJbqN+PrXRvBdX0U8MZSSNTkRqcKR/vVgahfuSm8E/ZQUOxgNmfUd6AMe9Rbm5MIke4T7xZsdR6+9FWZ9J82K0+z7TMMs0zj5Svpx/WigD0L9tKx1ix+Pmla1aagDYLocUD6exwpfzZiHP5gfhXxL+0x8UNZ0u30rS9Juba21lSxulimIkCnpjB5FfVX7fs90/7SeiR6bHeWF7BoMDtqBy1nODPPiCUfw9Cd3vXnfwv+Ful3PiabxL4j0jSv7XLj5bdfOEfoQzdQaAML9k7wZr9tolx4m1q6vGdE/dCUt++z9a9I+Kviu58LeF7rT9Ome41fVozHHCfnKZ689q7XxXrs6aTK2mWwvLpAQbeACNCo9B0zXkngfw/q/iHxEmra8kkDPny23ALEB2x60AeMeBv2HtM1q6Fx4ikuYhNy+yX5tx5JxXfax/wTj8EalZhNP8AEWpWF3EhA+1fvEb06dBmve7S+ik1aPa5QhShDtkyD1xXYwHztPtH2vbyI/zKeEkX3oA/Mf4jfsOfE34YIb61hj1iBSSslgx3FexweteU6jqPiXQ7e70XVdGlW9uGDI0yMHi/3QOK/au8ZLyWzgVRJLMSHjkBaMj0PtXwT+2t4aHhDV7bVo4kg+1TmCUQx4jjB6Ec0AfJ+v6q1zpFrDJanT9YgKtG0XyHPcnFZ3/CDavrF3HJc3sEklypdJXm3F2/u/Wr2sa3bXus6FDdaVNAlq+yR/MJafJ4Ir25tCtbWbTbqJkuWiO5UZMtET0GB1NAHlvhr4E39yYX1KOXzJVLJbRrjJHQFvf2rqtF/Z6+NHiuzez0+1u4dORfKENxceVGU7AA9RX3r8B/gSkaWmv69GReXEXmxW8y5wO2PQ16xrnmNbxQPayRWOcBmTEqN6nHagD8u5f2LfGNjYFbySytNShJLokhkDL9R3+le1fsbfFc+Bnv/hz4pj8m7jlP2J58hJhnkDPpX1Vq+l2MkbPcYkuoDuQj5VY+9fO3xv8A2dr/AMZx2uv2+oSacssjT2k/2cxguhwTG+PmAPWgD6M1sahDoN//AMI/DDqGoNEZooJmzExA+7t7V8eaR+034tuNb1HQtd8LabomooxaL7T5iyvg8gZ4Irc+Dv7T2s+GPEdr4N8eRyXl+032fTtagiAWfsFdR/OvpTxL4a0XxvbfaNe0GN3i+XzolCvDn+LPWgDwaabxr440WC8t9Qt7LcRJ5FqCkLqOoZvX6UVqeKviS3gpYbFdPg1CC0k8tHMoj3Re3GCaKAIv+CvvjbxF4W+P2h22lalLZ2V14ftmkjQYV2E9x1P5cV8Z/Dz9orxZ4F8UW17qGo
XWq2KN+/spZOGX/ZPY19pf8Fj9EvU+Leh6z5Rm0waFDbyKwO0P585ByOh5r84ryOexhMEkWY5cSLK6EE/Q+lAH6GeD/i/dfGlrOTTzL4XsMFpDK2RIvu3+Fey6f4dWOwsk0+bKx5LO53K59cGvkn9jX4gWOqQnQ9VZf9Djx+927dmeCo7mvu3S7aK6tkmtJoroKgVYyu3YPwoAzNKtsyxo0ce1hu84BTgj37V6noXhibxJZLJGU+yKMkSSfJkegrhPDGhIb8NIZmVXOYuAhPr717p4bv4rC3iWWPEaqRtW3yB9KAK9h4BukSNiwNu/KFACuPQmvlT/AIKIfs+ap4n+FVxq1lZhptMb7UogBbco6jHrX1VrHjefSdLkMvyxq5IJbovso6Vzmp/FjS7vQJYJoEvfO/dvFM2VVD3IoA/CnUVS+1PTPtExS3kTKrJnKSd1b05r6q/Zs8H3vxQ+JGiWNkqiOzQSX8sA+SLHRcnqTX0J4v8A2OPhF401d7+3vXsWupvOcx7vLD9ce1ewfCefwt8FNNnsLCIO4cKk7xKPMYcZz1NAHuFj4BYaNbRGSS1ndAoRuflH8iawvEvhC80u7hRd4LxnJaQbse45zXSaZ8QbXWLxTC23UJogSZD8rH09qq6tqNrPPJIsE8N0q+W7+YV5PZQetAHndt4b0e08Kax4u8bajBpfhfSZGb7P5kcb6iyKWMIeRlAZjtUAHkkjI614nqmv638W/EI17WpG08xwfZtL0Cy/48tItTgiFBwHkOF8yXHJACgKoFd7488L2d74u+1XemtO8MIS0uL2cyGzHcQqx2xZOSSgDMSck1S0Pws1091EBG6ldyyh8OPegD5l/aQ+FkeoaQ+qRWiw3enoXtru1ISWJ+uQcj9a8T8D/tw+JdB8MXmieJIG1qaKEwWl6rbZAen7z+99a9r/AGy/GtrZ+E77S4ibRVj2eYjlGkfpyO9fnnzIT2PQADrQB3PxE+LusfEUWqXipbxW+SgiJ3EnqSe9FVPAHg+LxNqgWSRm8hg8tsiZYqDyOetFAH6v/wDBSrRZ9W8f6Uohhe2/slA/nEkMfMl42j+dfmF4/wDBF3pGlsqvcS2cUjOBMB+7J6Be+2v2J/bB8MWWufEqya4j3Tf2VGiEOQQPMk7Cvi7xh8B/tEzFzIyzuQw3cIn49TQB8HeE/E154O8QWuq2bESwPkgHhh3FfaPwS/bht9FC2njOEJp1xIPIuEO8xj3I9K+SPir4Dm+H/i+902VZltSxe1llXHmL61l6dqAOkXGmTxq6zOPJmcZ2c87aAP2m8K634e8f2q3/AIZ1YazHKodXtZwwiPXBXtW4PFN1odrcAxzrdyfJDBK23J9RX4u+HvG+tfCPxBaXvhrW7uB4sGQQb4o5PUMM4NfqB8BPihZfGrwNZ3l9cB9RjTElzu4Df3c9jQB6EBf64bq4vLqcIBzE3CE+ma4a+hvIJnWS4hjnB3CAHKbPUt617JLqrjRrWFrCGOOEeWvybiw9T7+9czr/AIQg1NYbqzjW24/eIwOW+lAHKWN6P7LvVmQtcgAoYzhcepHf61z9tC18SIJIndHA82VixQn2rf1SznupPm/cXkKgPk7cRD14rQfwaJbu2Ecwto5oxMwXo4HuKALfhdbmK1uYDvi+yncj4P7x/wDZPXFddYeP9XuLbOoZlmtxiIvwCam0ub7FbRpuSGYkYH3jtHr6Vz/jsJHtu5TIZBKHJVtsYQdTigDM8SahqOsXMd9NE0Qjzl59vlZ9zXkPxK/ap8D/AAnhvUj1eDUvEhiIFvbNvXd/d9BXyJ+1J+0R4i+KHjXVtG0jVZLLwvpchSK0s5WQzkdWOPvV4BbJda2IoVZJJI3LZYnz8d8nFAG78Wfi7rHxc8RTanqoWEsx2wxkhVHYYrn7DRStjHqEnnRo8ojiEaEsx7kHpXQ+GPCDa/4qbT9PE91CRuaRIvN2v/dYkV9P+GfgZc6Zp8ReH5p8M0Mv3Y8d9p
6UAeY/CfwpqOlW88t7aShlcSx2txGrSSp16jmivqVvhvFFoNtIttN52M5h+/8AXPpRQB9jftSxb/iHZsYgUXTY90nO4fvJOBXlaadGdNWNoEaINhbhzudCe2DXqX7U0pb4h2lvEw85tMjdlJJ+USSdq89sJ5pYS8USGWaLZI0g+6PbtQB8/wD7RXwItfG/hspapBJe2Tb33KHdl6/KRXwHfxXfhqSTRry0gmRrkxWxuhzB82N2Ov51+qeqWgiiuLeRXSCFDh0cpvJ9fWviD44/A698WX2r+IbJREbOPAg3gyyY/wBmgD531XSpYdOmk3tdTQ3LRvIjEqABnOPSvc/2QvjO/wAOviLpVhdvDB4S1aTy7iO5bCRS44cH1zio/wBnbwV/wsPS9R8OXEEFvLNG8XlorLMz46k15RLZ3fg3xU2kajaXMsumXDxqQpypU8EJjmgD9nrHWJo50vZH3K4whkX5Hi7FD0q/eW0F2JL1nLPIm0RFyFVfU15d+zf49i+JHwz0G589ppFg8mY8blxxgqehr0nUpQ1qlrDI9raR5jdwA272JNAGLq+laNosyfatRt7a31RRFDFPcZluXHZM9vardlClsrQWizhwAkhK7lUf3c9q8k+NX7PzfF3x34H1u38SfYoPDEqsdPjBUE7gwIb1yMfSvabaIS3cyxzPbzI+ZZYjgPx6d6AGS3UUTbIcTvjbtb5R78968G/a9+KEfw28BzfvU+1yRlIED7VkyOQp9q93vAbXTriQNCUiYyOZHGFQcnJPSvyh/ai+LNx8Z/idqN9DJFFoOkM1rZ27t+7bHDEAevrQB5JaRfbEknCSG5unZkaNMvuJ6A5q1bWj6fax6db7m1y4mCbbdjvGegOP5Vp+A4LN9QutUuZLvTdGsITNiFwXMvZVz1ya7b9l9IvF/wC0DpTpYf6QzPMiBsqWHOWzQB9dfsz/AADs/h54Ugn1WUXWpXJ8+QSrgox5Ax3r3C802BLF57gxxxSgqQ6AFsdMAc1saUbrzJri4tLbhRvDYDRkenrTNX+yQzrJezHftDhWCoCPagDlNStz9kiDxvFKUwqDIXy/p1oq5Neh76Z5JZHibhHJDDZ/dz2ooA9e/adhR/irZP5/2Z10qMFzj5gZJOOa80u9Rh0q4+xPceSJYd5WNBk+2a9B/awvCvxLsrZUU79MjZy2MhRJJyM1876/rbGWB1uXZyfLhllUHbQBLqXi1NVtbqG2kfdJ8kiSNlowO4FeeeN9esdF8Da3eLeJHc28BAnSDLkY71smzayubmfdK4iUtLdBRmRj29hXy/8AGjxH4p8W6rceH/C1je3hRCb5rSMlSp6KSOKAOZ/Z2+JN/wCEPHpn0+8hCPcfaEluSVdnznZ75pPjx4pRvitqfib+ypbC51FRJJDGThH7sG6YPpXFWGja78PNR02DWdIl0xri7jdHuosF16cH0r6b0HwjovivS9a0TV5ZI9SkQyQpIdxkBHG0Ht9KAPHv2bf2g9Y+D3ii41D7LcXeh3DmWe3R2CRt3bP9K+5rb9uj4eap4YW8k1mwtZyhaS0kB8zdjgY7mvzy8f8Awv8AFHw3iiRIr5tIug2HyQh/AdK4ye+g/sE2kEcL26kMzyqBOrnsDjpQB9T6j/wUT1+11m+XTtFt7jS5WKqLhzuIzwwA6V7D4B/b78FyeE47zXZZbHUIVJbT0UszOP7rdxX5rDjnqKtQyv8AZJoRIiISHww5Yj0NAH118af27L/4nadP4c0LS1tbC+bbLNK2x2U9uPavmW20+FY7h9hSCPcyw3eeW9QV6/jWDDaLKYAky+Y5O5QPue9dEZbzUNNEVkZLxWPlyzSAKV9gM9KANzRPF76V4Gjt5YFmuHuC6CSEt5sfoW6AV7r+xDbWt/4j1/W0ijTUJD5Swx5/dqecA+hrw+WzXR/C8VrLb3F3d3R+z29shO3eehA619Sfs8fs83/gbwoLzULg2et3oEjwyOdkK9un8VAH1J4aut
yvZXEMhR5OqcmIj1zXT6obSOVb/Ec6NCYnDjdggcH2r548K+Ldb8Lay+m6rvME0+INQQgmQf3TmvZ5tUlfTZXaIqlydojVQcD1oAzZtaOn2ti5QRxOWWSXG4KPcd6K47xDqmo2d3DFDEDbRAllfnI9cUUAe0/tq3kknxYsbTZIsQ0iGQyRgbiTLKNvrXibWMdrCgCrC64KgruIPvnoa9q/bauUT4rWEYmKSnSYTtTrjzZefpXidrq8qWrXFzEqAtsLrwX/ANqgCzqxku4ktZhkSDJQvwR74FWPDsEmmpEdMtLM2wJEjGMIjDvuI61JazyX80chckKMZJCq6+2epoOpPbSkFEkh3fu0Y4478DvQB82/tzarHdeDPDgDf6S16xjBVd20DoO4GawvhV8Q0vrzR9ctrdJ9W0tVjvo4xvEcGMHk969n+IHw40/4i+IrvXb23WeKCydLWCbgM2ORjsfevlX9l/U7fQ/jZFo2rpJaafdSvHLB/dYZwD6igD7a8X+CdM+LPhG7VGKWN8oms3RS20Y5HHQ1+ePxf+Fkvw78ZXGlQB5k2eeikEMid819laf8Rb/9n/4profiZQfAuuSsdMvxJkRMeit/dHNej+NPA+laprelapd6R/anlZMywkMnlnoc/wAXFAH5X28cs29I13DbuYewpIioY5UFSDyeo96/R3Vv2U/hF4h1mXUWtJ9NjmUu1vFKV/EL2qpbfsu/CGC02PpE14+CxWGVjIE9c9jQB+edrPEgCT7vLB3KY8bgfrXXaWLnUZrOxstAlW5nAKP5hAYH+Lp/Wv0M8O/DXwfoGjrpGjfDW2tdPdSZL7WCrtN+fJryf9oe68L/AAv8FTWmkQWlvrd4QsW2UYjPoB1AAoA8C8LaqsHxC8PW+oQWs66LdKsksRKuW9CDwa+/rKGS8tVvLeclLk7mjc8quOwr4j/Ze0iHV/Guoz6tprzSKuXvw4aEP246Zr7a0ywgjs0VYTeSqeBcTBM/QDtQBz3iPw3cXN3p0lqqo0bl/wB6fkYf0rotEv5o7F97CK3DZ/c/NvP41JN5MGn3kMiDYpzJ83zR57LWYsts9oEtlkh8pSBIsg5oAPFExu4Li4tzPOsYXBbCbvYDvRWBe3Fzq3lWxE1vFEf9ZG2S5/2s9KKAPdv25/NPxg0sQeaD/ZEW94kzsXzZeSa8Esbi0muHMjTM0fyxYX5H9yKKKALxcFWnmmR1BzHC5PBHYEdKmsWdvIvJIfs0ExOMEvtb3PYUUUCZr6Vtvdk11JIIQGRpHTKk+1fCPxjuB8L/AI9tq9rDHOqyCUI4O1h0J+tFFAz6u8QeCbH40fDR9Gv5Yrh7m0F7pV5Eudr4zt3dueK+dvg5+0b4x/Z88ZR+HfFvmzaJE32aeG6Us8Kf3kPeiigfQ+ltN/aK+G3iSQQW/iW3Sc5kZ7vEe1OuASOaWf41eCLezzb+KtMlR348qdVcAf3vUUUULYR5/wDE79oC5+LU6eC/h7HJq185RZfEEIKR2SZ52Hv9az/HPw38AfBnwjdaj4nt5PEGtXCArfai/nXE8xHZT0FFFAFj9mvw3Onh+W+a0XTdM1CbzbdIwN+73XrXv2pNL5UpkX7iAK6EEsfTHaiigCm8qmzjjZUcPhmbBwh9G9aytXNgomtvMmjspQNghXaRJ6g0UUFWMww/YnhTzHa3K4LSc7m9SaKKKAsf/9k='


# input is type bytes string
def predict(inputs):
    """Clipper model closure for a model registered with input_type="bytes".

    Ignores the request content (beyond printing it for debugging) and always
    returns the same fixed response containing the module-level base64 JPEG
    data-URI ``heatmap``.

    Args:
        inputs: batch of raw byte strings delivered by Clipper.

    Returns:
        Nested list ``[[2, heatmap], [0]]`` serialized back to the client.
    """
    # NOTE(review): parameter renamed from `input` to avoid shadowing the
    # builtin; Clipper invokes the closure positionally, so the rename does
    # not affect callers.
    print(inputs)  # debug: dump the raw request batch to container logs

    return [[2, heatmap], [0]]


# Identity of the model container to deploy.
model_name = "test1"
model_version = 2

# Package `predict` (and its captured globals, e.g. `heatmap`) into a Clipper
# model container accepting raw bytes, and deploy it under test1:v2.
python_deployer.deploy_python_closure(
    clipper_conn,
    name=model_name,
    version=model_version,
    input_type="bytes",
    func=predict,
    #pkgs_to_install=['json']
)

# #link model with application
# clipper_conn.link_model_to_app(
#     app_name = app_name,
#     model_name = model_name
# )

# Sanity checks: registered applications and replica count for this version.
print(clipper_conn.get_all_apps())

print(clipper_conn.cm.get_num_replicas(name=model_name, version=model_version))
Exemplo n.º 26
0
# input is type string
def predict(inputs):
    """Clipper model closure for a model registered with input_type="strings".

    Logs the (encoded) first element of the batch for debugging and returns a
    fixed response regardless of the request content.

    Args:
        inputs: batch of strings delivered by Clipper.

    Returns:
        Nested list ``[[2, True], []]`` serialized back to the client.
    """
    # NOTE(review): parameter renamed from `input` to avoid shadowing the
    # builtin; Clipper invokes the closure positionally, so the rename does
    # not affect callers. Dead commented-out pandas experiment removed — see
    # git history if the JSON->DataFrame path is needed again.
    logger = init_logging()
    logger.debug("DEBUGINGG>>>>>>>>>>>")
    # Encode so the exact bytes received are visible in the debug log.
    logger.debug((inputs[0]).encode())
    return [[2, True], []]


# Identity of the model container to deploy.
model_name = "consultation"
model_version = 31

# Deploy `predict` as consultation:v31, accepting string inputs.
# NOTE(review): pkgs_to_install=["pandas"] appears to be a leftover from a
# pandas-based experiment — the closure no longer uses pandas; verify before
# removing.
python_deployer.deploy_python_closure(clipper_conn,
                                      name=model_name,
                                      version=model_version,
                                      input_type="strings",
                                      func=predict,
                                      pkgs_to_install=["pandas"])

# #link model with application
# clipper_conn.link_model_to_app(
#     app_name = app_name,
#     model_name = model_name
# )

# Sanity checks: registered applications and replica count for this version.
print(clipper_conn.get_all_apps())

print(clipper_conn.cm.get_num_replicas(name=model_name, version=model_version))
Exemplo n.º 27
0
# Deploy Sum function
# Register the "Sum" application: doubles in, "-1.0" returned if the model
# misses the 100 ms (100,000 micros) latency SLO.
clipper_conn.register_application(name="Sum",
                                  input_type="doubles",
                                  default_output="-1.0",
                                  slo_micros=100000)


# Define model func
def feature_sum(xs):
    """Return the sum of each input vector, stringified for Clipper output."""
    results = []
    for vector in xs:
        total = sum(vector)
        results.append(str(total))
    return results


# Deploy python model
# Package `feature_sum` into a Clipper container as sum-model:v1.
python_deployer.deploy_python_closure(clipper_conn,
                                      name="sum-model",
                                      version=1,
                                      input_type="doubles",
                                      func=feature_sum)

# Link sum-model to the Sum app via the Clipper admin REST endpoint (port
# 1338) instead of clipper_conn.link_model_to_app.
data = json.dumps({"app_name": "Sum", "model_names": ["sum-model"]})
r = requests.post('http://localhost:1338/admin/add_model_links', data=data)
print(r.status_code, r.text)

# Deploy average function
# Register the "Average" application: doubles in, "-1.0" returned if the
# model misses the 100 ms (100,000 micros) latency SLO.
clipper_conn.register_application(name="Average",
                                  input_type="doubles",
                                  default_output="-1.0",
                                  slo_micros=100000)


# Define model func
Exemplo n.º 28
0
    else:
        if len(info['linked_models']) > 0:
            model_name = info['linked_models'][0]
            version = str(
                int(clipper_conn.get_current_model_version(model_name)) + 1)
        else:
            version = '1'
        new_app = False
    print('    version: %s' % (version))

    #-----------------------------------------------------------------------
    if args.deploy == 'python':
        if input_type == 'double':
            deploy_python_closure(clipper_conn,
                                  name=model_name,
                                  version=version,
                                  input_type=input_type,
                                  func=testmodel1)
        elif input_type == 'string':
            deploy_python_closure(clipper_conn,
                                  name=model_name,
                                  version=version,
                                  input_type=input_type,
                                  func=testmodel2)
    elif args.deploy == 'pyspark':
        if input_type == 'double':
            deploy_python_closure(clipper_conn,
                                  name=model_name,
                                  version=version,
                                  input_type=input_type,
                                  func=testmodel5)
Exemplo n.º 29
0
    slo_micros=10000000)  # 10,000,000 micros == 10 sec

# Sanity check: list the applications registered so far.
clipper_conn.get_all_apps()

#################################################
######### Define Own Prediction Function ########
#################################################

import sklearn
import numpy as np
from sklearn.neural_network import MLPClassifier
from sklearn.externals import joblib
from clipper_admin.deployers import python as python_deployer

# Deploy one version of "digit-nn-model" per saved sklearn network; the
# version string doubles as the architecture tag (e.g. "10x1k").
for version_postfix in ["10x1k", "10x2k", "20x1k", "15x2k"]:

    # NOTE(review): sklearn.externals.joblib is deprecated in newer
    # scikit-learn releases — confirm the pinned version, or import joblib
    # directly.
    model_path = "../../models/sklearn/"
    model_name = "dig_nn_model_" + version_postfix + ".sav"
    clf = joblib.load(model_path + model_name)

    # Closure captures `clf`; it is serialized by the deployer within this
    # same iteration, so each version ships its own classifier.
    def clf_predict(xs):
        return clf.predict(xs)

    python_deployer.deploy_python_closure(clipper_conn,
                                          name="digit-nn-model",
                                          version=version_postfix,
                                          input_type="doubles",
                                          func=clf_predict)

# Route the "digit" application's queries to the (latest) digit-nn-model.
clipper_conn.link_model_to_app(app_name="digit", model_name="digit-nn-model")
# clipper_conn.stop_all()
# clipper_conn.stop_all()
Exemplo n.º 30
0
            clipper_query_port=1337 + node_id,
            clipper_management_port=2337 + node_id,
            clipper_rpc_port=7000 + node_id,
            redis_ip=None,
            redis_port=6379 + node_id,
            prometheus_port=9090 + node_id,
            # WARING: DO NOT CHANGE THE RULE OF NETWORK NAMES
            docker_network='clipper_network_{}'.format(node_id),
            # SINCE THIS IS USED BY reset.sh TO IDENTIFY CLIPPER CONTAINERS
            extra_container_kwargs={})
    )  # for node_id in range(args.num_nodes)]

    try:
        clipper_conn.start_clipper()
        clipper_conn.register_application(name="default",
                                          input_type="string",
                                          default_output="",
                                          slo_micros=100000)

        python_deployer.deploy_python_closure(clipper_conn,
                                              name="echo-model",
                                              version=1,
                                              input_type="string",
                                              func=echo_model)
        clipper_conn.link_model_to_app(app_name="default",
                                       model_name="echo-model")
    except:
        exit(1)

    exit(0)