def main():
    # get url parameters for HTML template
    name_arg = request.args.get('name', 'mnist')
    addr_arg = request.args.get('addr', 'mnist-service')
    port_arg = request.args.get('port', '9000')
    args = {"name": name_arg, "addr": addr_arg, "port": port_arg}
    logging.info("Request args: %s", args)

    output = None
    connection = {"text": "", "success": False}
    img_id = str(uuid.uuid4())
    img_path = "static/tmp/" + img_id + ".png"
    try:
        # get a random test MNIST image
        x, y, _ = random_mnist(img_path)
        # get prediction from TensorFlow server
        pred, scores, ver = get_prediction(x,
                                           server_host=addr_arg,
                                           server_port=int(port_arg),
                                           server_name=name_arg,
                                           timeout=10)
        # if no exceptions thrown, server connection was a success
        connection["text"] = "Connected (model version: " + str(ver) + ")"
        connection["success"] = True
        # parse class confidence scores from server prediction
        scores_dict = []
        for i in range(0, 10):
            scores_dict += [{"index": str(i), "val": scores[i]}]
        output = {
            "truth": y,
            "prediction": pred,
            "img_path": img_path,
            "scores": scores_dict
        }
    except Exception as e:  # pylint: disable=broad-except
        logging.info("Exception occured: %s", e)
        # server connection failed
        connection["text"] = "Exception making request: {0}".format(e)
    # after 10 seconds, delete cached image file from server
    t = Timer(10.0, remove_resource, [img_path])
    t.start()
    # render results using HTML template
    return render_template('index.html',
                           output=output,
                           connection=connection,
                           args=args)
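

# The helpers used above (random_mnist, get_prediction, remove_resource) are not part
# of this listing. Below is a minimal sketch of what a get_prediction for a TF Serving
# gRPC endpoint could look like; the signature name ("serving_default") and the tensor
# names ("images", "scores") are assumptions about the deployed model, not taken from
# the original code.
import grpc
import numpy as np
import tensorflow as tf
from tensorflow_serving.apis import predict_pb2
from tensorflow_serving.apis import prediction_service_pb2_grpc


def get_prediction_sketch(image, server_host, server_port, server_name, timeout=10):
    """Query a TF Serving model over gRPC and return (pred, scores, version)."""
    channel = grpc.insecure_channel("{}:{}".format(server_host, server_port))
    stub = prediction_service_pb2_grpc.PredictionServiceStub(channel)

    request = predict_pb2.PredictRequest()
    request.model_spec.name = server_name
    request.model_spec.signature_name = "serving_default"  # assumed signature name
    flat = np.asarray(image, dtype=np.float32).reshape(-1)
    request.inputs["images"].CopyFrom(   # "images" is an assumed input tensor name
        tf.make_tensor_proto(flat, shape=[1, flat.size]))

    response = stub.Predict(request, timeout)
    # "scores" is an assumed output tensor holding the ten class probabilities.
    scores = list(response.outputs["scores"].float_val)
    pred = scores.index(max(scores))
    version = response.model_spec.version.value
    return pred, scores, version
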
def main():
  # get url parameters for HTML template
  model_arg = request.args.get('model', 'kfserving-mnist-01')
  server_name_arg = request.args.get('name', 'kfserving-mnist-01.kubeflow.example.com')
  server_ip_arg = request.args.get('addr', '10.108.37.106')
  args = {"model": model_arg, "name": server_name_arg, "addr": server_ip_arg}
  logging.info("Request args: %s", args)

  output = None
  connection = {"text": "", "success": False}
  img_id = str(uuid.uuid4())
  img_path = "static/tmp/" + img_id + ".png"
  logging.info("img_path " + img_path)
  try:
    # get a random test MNIST image
    x, y, _ = random_mnist(img_path)
    # get prediction from TensorFlow server
    pred = get_prediction(x,
                           model_name=model_arg,
                           server_ip=server_ip_arg,
                           server_name=server_name_arg)
    logging.info("pred " + pred)
    # if no exceptions thrown, server connection was a success
    connection["text"] = "Connected (model version: )"
    connection["success"] = True
    # this variant returns only the predicted class, so build index-only
    # entries for the template (no per-class confidence scores available)
    scores_dict = []
    for i in range(0, 10):
      scores_dict += [{"index": str(i)}]
    output = {"truth": y, "prediction": pred,
              "img_path": img_path, "scores": scores_dict}
  except Exception as e: # pylint: disable=broad-except
    logging.info("Exception occured: %s", e)
    # server connection failed
    connection["text"] = "Exception making request: {0}".format(e)
  # after 10 seconds, delete cached image file from server
  t = Timer(10.0, remove_resource, [img_path])
  t.start()
  # render results using HTML template
  return render_template('index.html', output=output,
                         connection=connection, args=args)
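

# get_prediction in the KFServing variant above is likewise not shown. A minimal
# sketch follows, assuming the KFServing v1 data plane (TF Serving-style REST):
# the request is POSTed to the cluster ingress address with the InferenceService
# hostname in the Host header. The flattened-pixel payload is an assumption about
# the deployed model's expected input.
import numpy as np
import requests


def get_prediction_kfserving_sketch(image, model_name, server_ip, server_name):
    """Return the predicted class from a KFServing-hosted model (sketch)."""
    url = "http://{}/v1/models/{}:predict".format(server_ip, model_name)
    headers = {"Host": server_name}
    payload = {"instances": [np.asarray(image, dtype=float).reshape(-1).tolist()]}
    response = requests.post(url, json=payload, headers=headers, timeout=10)
    response.raise_for_status()
    prediction = response.json()["predictions"][0]
    # If the model returns per-class probabilities, report the argmax;
    # otherwise return the raw prediction as-is.
    if isinstance(prediction, list):
        return prediction.index(max(prediction))
    return prediction
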
def main():
    # get url parameters for HTML template
    name_arg = request.args.get('name', 'mnist-classifier')
    addr_arg = request.args.get('addr', 'localhost')
    port_arg = request.args.get('port', '8080')
    args = {"name": name_arg, "addr": addr_arg, "port": port_arg}
    print(args)

    output = None
    connection = {"text": "", "success": False}
    img_id = str(uuid.uuid4())
    img_path = "static/tmp/" + img_id + ".png"
    try:
        # get a random test MNIST image
        x, y, _ = random_mnist(img_path)
        # get prediction from TensorFlow server
        pred = get_prediction(x,
                              server_host=addr_arg,
                              server_port=int(port_arg),
                              deployment_name=name_arg,
                              timeout=10)
        # if no exceptions thrown, server connection was a success
        connection["text"] = "Connected to Seldon GRPC model serving service"
        connection["success"] = True
        # parse class confidence scores from server prediction
        scores_dict = []
        for i in range(0, 10):
            scores_dict += [{"index": str(i), "val": pred[i]}]
        output = {"truth": y, "img_path": img_path, "scores": scores_dict}
    except Exception as e:
        # server connection failed
        connection["text"] = "Could Not Connect to Server: " + str(e)
    # after 10 seconds, delete cached image file from server
    t = Timer(10.0, remove_resource, [img_path])
    t.start()
    # render results using HTML template
    return render_template('index.html',
                           output=output,
                           connection=connection,
                           args=args)
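

# random_mnist, shared by all of the handlers above, is also not part of this listing.
# A sketch using the Keras MNIST test split; treating the argument as the PNG file
# path to write (as the handlers do) is an assumption about the helper's contract --
# note the test further below passes a directory instead.
import random

import numpy as np
import tensorflow as tf
from PIL import Image


def random_mnist_sketch(save_path):
    """Pick a random MNIST test digit; return (flat pixels, label, index)."""
    _, (x_test, y_test) = tf.keras.datasets.mnist.load_data()
    idx = random.randrange(len(x_test))
    img = x_test[idx]                      # 28x28 uint8 array
    Image.fromarray(img).save(save_path)   # cache the image for the HTML template
    x = img.astype(np.float32).reshape(-1) / 255.0
    return x, int(y_test[idx]), idx
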
def main():
    # get url parameters for HTML template
    name_arg = request.args.get('name', 'mnist')
    addr_arg = request.args.get('addr', 'tf-serving.kubeflow.svc')
    port_arg = request.args.get('port', '9000')
    args = {"name": name_arg, "addr": addr_arg, "port": port_arg}
    print(args)

    output = None
    connection = {"text": "", "success": False}
    img_id = str(uuid.uuid4())
    img_path = "static/tmp/" + img_id + ".png"
    try:
        # get a random test MNIST image
        x, y, _ = random_mnist(img_path)
        # get prediction from TensorFlow server
        pred, scores, ver = get_prediction(x,
                                           server_host=addr_arg,
                                           server_port=int(port_arg),
                                           server_name=name_arg,
                                           timeout=10)
        # if no exceptions thrown, server connection was a success
        connection["text"] = "Connected (model version: " + str(ver) + ")"
        connection["success"] = True
        # parse class confidence scores from server prediction
        scores_dict = []
        for i in range(0, 10):
            scores_dict += [{"index": str(i), "val": scores[i]}]
        output = {
            "truth": y,
            "prediction": pred,
            "img_path": img_path,
            "scores": scores_dict
        }
    except Exception as e:  # pylint: disable=broad-except
        # server connection failed
        connection["text"] = "Could Not Connect to Server: " + str(e)
    # after 10 seconds, delete cached image file from server
    t = Timer(10.0, remove_resource, [img_path])
    t.start()
    # render results using HTML template
    return render_template('index.html',
                           output=output,
                           connection=connection,
                           args=args)
    def test_predict(self):  # pylint: disable=no-self-use
        this_dir = os.path.dirname(__file__)
        data_dir = os.path.join(this_dir, "..", "data")
        img_path = os.path.abspath(data_dir)

        x, _, _ = mnist_client.random_mnist(img_path)

        server_host = "localhost"
        server_port = 9000
        model_name = "mnist"
        # get prediction from TensorFlow server
        pred, scores, _ = mnist_client.get_prediction(x,
                                                      server_host=server_host,
                                                      server_port=server_port,
                                                      server_name=model_name,
                                                      timeout=10)

        if pred < 0 or pred >= 10:
            raise ValueError(
                "Prediction {0} is not in the range [0, 9]".format(pred))

        if len(scores) != 10:
            raise ValueError(
                "Scores should have dimension 10. Got {0}".format(scores))