# Shared imports for the tests below. The konduit classes follow the package's
# usual star-import style; helpers such as is_port_in_use, load_java_tp,
# inference_from_json and config_to_dict_with_type are assumed to come from
# the surrounding test utilities.
import json
import random

import numpy as np
import pydatavec

from konduit import *  # ServingConfig, PythonConfig, PythonStep, TransformProcessStep, InferenceConfiguration
from konduit.server import Server


def test_server_start():
    port = random.randint(1000, 65535)
    serving_config = ServingConfig(http_port=port)
    python_config = PythonConfig(
        python_code="first += 2",
        python_inputs={"first": "NDARRAY"},
        python_outputs={"first": "NDARRAY"},
    )
    step = PythonStep().step(python_config)
    server = Server(steps=step, serving_config=serving_config)
    server.start()
    client = server.get_client()

    data_input = {"default": np.load("../data/input-0.npy")}

    assert is_port_in_use(port)

    try:
        predicted = client.predict(data_input)
        print(predicted)
    except Exception as e:
        print(e)
    finally:
        server.stop()
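# The tests here call is_port_in_use(port). The konduit test utilities
# normally provide this helper; the version below is only a minimal sketch
# under that assumption, not the library's own implementation.
import socket


def is_port_in_use(port):
    # Try to connect to the port on localhost; a return code of 0 means
    # something is already listening there.
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
        return sock.connect_ex(("localhost", port)) == 0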
def test_setup_and_run_start():
    port = random.randint(1000, 65535)
    serving_config = ServingConfig(http_port=port)
    python_config = PythonConfig(
        # In setup-and-run mode the step needs run() to hand back the declared
        # outputs, so the dict is returned rather than discarded.
        python_code="def setup(): pass\n"
        "def run(input): return {'output': np.array(input + 2)}",
        python_inputs={"input": "NDARRAY"},
        python_outputs={"output": "NDARRAY"},
        setup_and_run=True,
    )
    step = PythonStep().step(python_config)
    server = Server(steps=step, serving_config=serving_config)
    server.start()
    client = server.get_client()

    data_input = {"default": np.asarray([42.0, 1.0])}

    assert is_port_in_use(port)

    try:
        predicted = client.predict(data_input)
        print(predicted)
    except Exception as e:
        print(e)
    finally:
        server.stop()
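# For reference, the inline python_code string above can also be built from a
# readable multi-line string before handing it to PythonConfig. This is purely
# a formatting convenience and assumes, as above, that run() returns the
# declared outputs as a dict.
SETUP_AND_RUN_CODE = (
    "def setup():\n"
    "    pass\n"
    "\n"
    "def run(input):\n"
    "    return {'output': np.array(input + 2)}\n"
)
# python_config = PythonConfig(python_code=SETUP_AND_RUN_CODE, ...)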
def test_build_tp(output_format):
    schema = pydatavec.Schema()
    schema.add_string_column("first")
    tp = pydatavec.TransformProcess(schema)
    tp.append_string("first", "two")

    java_tp = tp.to_java()
    tp_json = java_tp.toJson()
    load_java_tp(tp_json)

    _ = json.dumps(tp_json)
    as_python_json = json.loads(tp_json)

    transform_process = (
        TransformProcessStep()
        .set_input(column_names=["first"], types=["String"])
        .set_output(column_names=["first"], types=["String"])
        .transform_process(as_python_json)
    )

    port = random.randint(1000, 65535)
    serving_config = ServingConfig(
        http_port=port,
        input_data_format="JSON",
        output_data_format=output_format,
        log_timings=True,
    )
    inference_config = InferenceConfiguration(
        serving_config=serving_config, steps=[transform_process]
    )

    as_json = config_to_dict_with_type(inference_config)
    inference_from_json(as_json)

    server = Server(
        inference_config=inference_config,
        extra_start_args="-Xmx8g",
        jar_path="konduit.jar",
    )
    server.start()
    client = server.get_client()

    assert is_port_in_use(port)

    try:
        predicted = client.predict({"first": "value"})
        print(predicted)
    except Exception as e:
        print(e)
    finally:
        server.stop()
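# test_build_tp expects an output_format argument, which suggests it is run
# under a test parametrization. One hedged way to wire that up with pytest is
# sketched below; the concrete format names are an assumption and should match
# whatever output data formats your konduit build supports.
#
# @pytest.mark.parametrize("output_format", ["JSON", "ARROW"])
# def test_build_tp(output_format):
#     ...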
from konduit import *  # PythonConfig, ServingConfig, PythonStep
from konduit.server import Server
from konduit.utils import default_python_path

import os

import cv2

work_dir = os.path.abspath(".")

python_config = PythonConfig(
    python_path=default_python_path(work_dir),
    python_code_path=os.path.join(work_dir, "detect_image.py"),
    python_inputs={"image": "NDARRAY"},
    python_outputs={"num_boxes": "NDARRAY"},
)

server = Server(
    serving_config=ServingConfig(http_port=1337),
    steps=PythonStep().step(python_config),
)
server.start()

# A client could also be created directly with Client(port=1337);
# here we reuse the one the server hands back.
client = server.get_client()

img_path = "./Ultra-Light-Fast-Generic-Face-Detector-1MB/imgs/1.jpg"
img = cv2.cvtColor(cv2.imread(img_path), cv2.COLOR_BGR2RGB).astype("int16")

try:
    print(client.predict(img))
finally:
    server.stop()
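# The server above executes detect_image.py for every request. That script is
# not shown here; the sketch below is only an assumption about its shape,
# based on how the Python step binds variables (as in the "first += 2" test):
# the declared input "image" is available as a variable, and the declared
# output "num_boxes" must be assigned before the script ends. The
# detect_faces helper is hypothetical, not the example's real detector code.
#
# --- detect_image.py (sketch) ---
# import numpy as np
#
# # "image" is injected by the Python step as an NDARRAY (H x W x C).
# boxes = detect_faces(image)  # hypothetical detector, one row per face
#
# # "num_boxes" is read back by the Python step as the NDARRAY output.
# num_boxes = np.array([len(boxes)])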