def test_serving_calls_model_fn_once(docker_image, sagemaker_local_session):
    """Deploy a local endpoint and verify model_fn is invoked once per worker.

    The served script (call_model_fn_once.py) returns a 500 error if model_fn
    is called again during request handling, so repeated successful predictions
    prove the model was loaded exactly once per worker.
    """
    script_path = os.path.join(resources_path, 'call_model_fn_once.py')
    model_path = 'file://{}'.format(os.path.join(resources_path, 'model.tar.gz'))
    model = ChainerModel(model_path, 'unused/dummy-role', script_path,
                         image=docker_image, model_server_workers=2,
                         sagemaker_session=sagemaker_local_session)
    with test_utils.local_mode_lock():
        # Fix: if deploy() raises, `predictor` would be unbound and the
        # finally block would raise NameError, masking the real failure.
        predictor = None
        try:
            predictor = model.deploy(1, 'local')
            predictor.accept = None
            predictor.deserializer = BytesDeserializer()
            # call enough times to ensure multiple requests to a worker
            for _ in range(3):
                # will return 500 error if model_fn called during request handling
                response = predictor.predict(b'input')
                assert response == b'output'
        finally:
            if predictor is not None:
                predictor.delete_endpoint()
def test_serving_calls_model_fn_once(docker_image, sagemaker_local_session, instance_type):
    """Serving must load the model once per worker, never during a request."""
    with _predictor(model_cpu_dir, call_model_fn_once_script, docker_image,
                    sagemaker_local_session, instance_type,
                    model_server_workers=2) as predictor:
        predictor.accept = None
        predictor.deserializer = BytesDeserializer()
        # Send several requests so each worker handles more than one; the
        # container answers with a 500 if model_fn runs during request handling.
        for _ in range(3):
            assert predictor.predict(b'input') == b'output'
def test_bytes_deserializer():
    """BytesDeserializer should hand back the raw stream bytes unchanged."""
    stream = io.BytesIO(b'[1, 2, 3]')
    deserialized = BytesDeserializer()(stream, 'application/json')
    assert deserialized == b'[1, 2, 3]'
def test_bytes_deserializer():
    """Deserializing a byte stream yields its exact contents, ignoring content type."""
    payload = b"[1, 2, 3]"
    result = BytesDeserializer()(io.BytesIO(payload), "application/json")
    assert result == payload