def test_model_bad_metrics():
    """A user object reporting invalid metrics should yield HTTP 400."""
    model = UserObject(metrics_ok=False)
    client = get_rest_microservice(model, debug=True).test_client()
    response = client.get('/predict?json={"data":{"ndarray":[]}}')
    body = json.loads(response.data)
    print(body)
    assert response.status_code == 400
def test_model_no_json():
    """GET /predict with no ``json`` payload at all is rejected with HTTP 400.

    Fix: removed the dead local ``uo = UserObject()`` that duplicated
    ``user_object`` and was never used.
    """
    user_object = UserObject()
    app = get_rest_microservice(user_object, debug=True)
    client = app.test_client()
    # Deliberately omit the ?json=... query parameter.
    rv = client.get('/predict?')
    j = json.loads(rv.data)
    print(j)
    assert rv.status_code == 400
def test_model_ok():
    """A well-formed empty-ndarray request returns 200 with tags and metrics."""
    model = UserObject()
    client = get_rest_microservice(model, debug=True).test_client()
    response = client.get('/predict?json={"data":{"ndarray":[]}}')
    payload = json.loads(response.data)
    print(payload)
    assert response.status_code == 200
    assert payload["meta"]["tags"] == {"mytag": 1}
    assert payload["meta"]["metrics"] == model.metrics()
def test_model_bin_data_nparray():
    """Binary input with ``ret_nparray=True`` is answered as an ndarray payload."""
    model = UserObject(ret_nparray=True)
    client = get_rest_microservice(model, debug=True).test_client()
    response = client.get('/predict?json={"binData":"123"}')
    payload = json.loads(response.data)
    print(payload)
    assert response.status_code == 200
    assert payload["data"]["ndarray"] == [1, 2, 3]
    assert payload["meta"]["tags"] == {"mytag": 1}
    assert payload["meta"]["metrics"] == model.metrics()
def test_model_bin_data():
    """Base64 binData is echoed back and the response parses as a SeldonMessage."""
    model = UserObject()
    client = get_rest_microservice(model, debug=True).test_client()
    raw = b"123"
    encoded = base64.b64encode(raw).decode('utf-8')
    response = client.get('/predict?json={"binData":"' + encoded + '"}')
    payload = json.loads(response.data)
    # The JSON body must be a valid SeldonMessage proto.
    message = prediction_pb2.SeldonMessage()
    assert message == json_format.Parse(
        response.data, message, ignore_unknown_fields=False)
    print(payload)
    assert response.status_code == 200
    assert payload["binData"] == encoded
    assert payload["meta"]["tags"] == {"mytag": 1}
    assert payload["meta"]["metrics"] == model.metrics()
def test_model_tftensor_ok():
    """A TFTensor request succeeds and its tftensor round-trips to the input array."""
    model = UserObject()
    client = get_rest_microservice(model, debug=True).test_client()
    original = np.array([1, 2])
    data = prediction_pb2.DefaultData(tftensor=tf.make_tensor_proto(original))
    message = prediction_pb2.SeldonMessage(data=data)
    response = client.get('/predict?json=' + json_format.MessageToJson(message))
    payload = json.loads(response.data)
    print(payload)
    assert response.status_code == 200
    assert payload["meta"]["tags"] == {"mytag": 1}
    assert payload["meta"]["metrics"] == model.metrics()
    assert 'tftensor' in payload['data']
    # Decode the returned tensor proto and compare with the original array.
    proto = TensorProto()
    json_format.ParseDict(payload['data'].get("tftensor"), proto,
                          ignore_unknown_fields=False)
    assert np.array_equal(original, tf.make_ndarray(proto))
# 示例#7 (Example 7)
    # NOTE(review): this chunk begins mid-statement -- the `if` branch that
    # pairs with this `else:` (presumably persistence handling, as in the
    # later fragment) is outside the visible source.
    else:
        # Instantiate the user's class with the parsed parameters.
        user_object = user_class(**parameters)

    # Choose the microservice implementation matching the declared service type.
    if args.service_type == "MODEL":
        import model_microservice as seldon_microservice
    elif args.service_type == "ROUTER":
        import router_microservice as seldon_microservice
    elif args.service_type == "TRANSFORMER":
        import transformer_microservice as seldon_microservice
    elif args.service_type == "OUTLIER_DETECTOR":
        import outlier_detector_microservice as seldon_microservice

    # Serving port: environment override, else the module default.
    port = int(os.environ.get(SERVICE_PORT_ENV_NAME, DEFAULT_PORT))

    if args.api_type == "REST":
        app = seldon_microservice.get_rest_microservice(user_object,
                                                        debug=DEBUG)
        app.run(host='0.0.0.0', port=port)

    elif args.api_type == "GRPC":
        server = seldon_microservice.get_grpc_server(user_object, debug=DEBUG)
        server.add_insecure_port("0.0.0.0:{}".format(port))
        server.start()

        print("GRPC Microservice Running on port {}".format(port))
        # The code sleeps forever after start(), keeping the process alive
        # while the gRPC server handles requests.
        while True:
            time.sleep(1000)

    elif args.api_type == "FBS":
        seldon_microservice.run_flatbuffers_server(user_object, port)
# 示例#8 (Example 8)
 def rest_prediction_server():
     """Start the REST prediction server.

     Relies on ``seldon_microservice``, ``user_object``, ``DEBUG`` and
     ``port`` from the enclosing scope (not visible in this chunk).
     """
     # NOTE(review): "Staring" typo is in the runtime log string; left as-is.
     print("Staring REST prediction server")
     app = seldon_microservice.get_rest_microservice(user_object,
                                                     debug=DEBUG)
     app.run(host='0.0.0.0', port=port)
# 示例#9 (Example 9)
    if args.persistence:
        # Restore previously persisted state, then schedule periodic
        # persistence at the configured push frequency.
        user_object = persistence.restore(user_class, parameters)
        persistence.persist(user_object, parameters.get("push_frequency"))
    else:
        user_object = user_class(**parameters)

    # Choose the microservice implementation matching the declared service type.
    if args.service_type == "MODEL":
        import model_microservice as seldon_microservice
    elif args.service_type == "ROUTER":
        import router_microservice as seldon_microservice
    elif args.service_type == "TRANSFORMER":
        import transformer_microservice as seldon_microservice
    elif args.service_type == "OUTLIER_DETECTOR":
        import outlier_detector_microservice as seldon_microservice

    # Serving port: environment override, else the module default.
    port = int(os.environ.get(SERVICE_PORT_ENV_NAME, DEFAULT_PORT))

    if args.api_type == "REST":
        app = seldon_microservice.get_rest_microservice(user_object)
        app.run(host='0.0.0.0', port=port)

    elif args.api_type == "GRPC":
        server = seldon_microservice.get_grpc_server(user_object)
        server.add_insecure_port("0.0.0.0:{}".format(port))
        server.start()

        # Fix: Python 2 `print` statement -> print() function so this also
        # runs under Python 3 (matches the parallel fragment above).
        print("GRPC Microservice Running on port {}".format(port))
        # Sleep forever to keep the process alive while the server runs.
        while True:
            time.sleep(1000)