def test_sample_from_data(self, PredictService):
    """The mapping turns plain request data into an ordered sample row."""
    svc = PredictService(
        mapping=[
            ('name', 'str'),
            ('sepal width', 'int'),
        ],
    )
    model = Mock()
    data = {'name': 'myflower', 'sepal width': 3}
    sample = svc.sample_from_data(model, data)
    assert sample[0] == 'myflower'
    assert sample[1] == 3
def test_sample_from_request(self, PredictService):
    """sample_from_request extracts a single-row sample from request args."""
    svc = PredictService(
        mapping=[
            ('name', 'str'),
            ('sepal width', 'int'),
        ],
    )
    model = Mock()
    fake_request = Mock(args={'name': 'myflower', 'sepal width': 3})
    sample, params = svc.sample_from_request(model, fake_request)
    first_row = sample[0]
    assert first_row[0] == 'myflower'
    assert first_row[1] == 3
def test_sample_from_data(self, PredictService):
    """sample_from_data converts each mapped field to its declared type."""
    field_types = [
        ('name', 'str'),
        ('sepal width', 'int'),
    ]
    predict_service = PredictService(mapping=field_types)
    model = Mock()
    sample = predict_service.sample_from_data(
        model, {'name': 'myflower', 'sepal width': 3})
    assert sample[0] == 'myflower'
    assert sample[1] == 3
def test_sample_from_request(self, PredictService):
    """Request args are mapped into row 0 of the returned sample matrix."""
    mapping = [('name', 'str'), ('sepal width', 'int')]
    predict_service = PredictService(mapping=mapping)
    model = Mock()
    req = Mock(args={'name': 'myflower', 'sepal width': 3})
    sample, params = predict_service.sample_from_request(model, req)
    assert sample[0][0] == 'myflower'
    assert sample[0][1] == 3
def test_predict_params(self, config, stream):
    """Stream predictions pass request params through to model.predict."""
    from palladium.server import PredictService
    model = Mock()
    model.predict.return_value = np.array([[{'class': 'a'}]])
    model.turbo = False
    model.magic = False
    stream.model = model
    stream.predict_service = PredictService(
        mapping=[
            ('length', 'float'),
            ('width', 'float'),
        ],
        params=[
            ('turbo', 'bool'),  # will be set by request args
            ('magic', 'bool'),  # default value will be used
        ],
    )
    line = '[{"length": 1.0, "width": 1.0, "turbo": "true"}]'
    assert stream.process_line(line) == [{'class': 'a'}]
    assert model.predict.call_count == 1
    sample_args, predict_kwargs = model.predict.call_args
    assert (sample_args[0] == np.array([[1.0, 1.0]])).all()
    assert predict_kwargs['turbo'] is True
    assert predict_kwargs['magic'] is False
def test_unwrap_sample_post(self, PredictService, flask_app):
    """With unwrap_sample=True a POSTed batch is handed over as a 1-d array."""
    svc = PredictService(
        mapping=[('text', 'str')],
        unwrap_sample=True,
    )
    model = Mock()
    model.predict.return_value = np.array([1, 2])
    with flask_app.test_request_context():
        fake_request = Mock(
            json=[
                {'text': 'First piece of text'},
                {'text': 'Second piece of text'},
            ],
            method='POST',
            mimetype='application/json',
        )
        resp = svc(model, fake_request)
        assert model.predict.call_args[0][0].ndim == 1
        expected_arg = np.array(
            ['First piece of text', 'Second piece of text'])
        assert (model.predict.call_args[0] == expected_arg).all()
        payload = json.loads(resp.get_data(as_text=True))
        assert resp.status_code == 200
        assert payload == {
            "metadata": {
                "status": "OK",
                "error_code": 0,
            },
            "result": [1, 2],
        }
def test_unwrap_sample_get(self, PredictService, flask_app):
    """unwrap_sample also applies to single samples from GET query args."""
    svc = PredictService(
        mapping=[('text', 'str')],
        unwrap_sample=True,
    )
    model = Mock()
    model.predict.return_value = np.array([1])
    with flask_app.test_request_context():
        fake_request = Mock(
            args={'text': 'Hi this is text'},
            method='GET',
        )
        resp = svc(model, fake_request)
        assert model.predict.call_args[0][0].ndim == 1
        model.predict.assert_called_with(np.array(['Hi this is text']))
        payload = json.loads(resp.get_data(as_text=True))
        assert resp.status_code == 200
        assert payload == {
            "metadata": {
                "status": "OK",
                "error_code": 0,
            },
            "result": 1,
        }
def test_post_request(self, PredictService, flask_app):
    """A JSON POST with several rows yields one prediction per row."""
    model = Mock()
    model.predict.return_value = np.array([3, 2])
    service = PredictService(
        mapping=[
            ('sepal length', 'float'),
            ('sepal width', 'float'),
            ('petal length', 'float'),
            ('petal width', 'float'),
        ],
        params=[('threshold', 'float')],
    )
    rows = [
        {
            'sepal length': '5.2',
            'sepal width': '3.5',
            'petal length': '1.5',
            'petal width': '0.2',
        },
        {
            'sepal length': '5.7',
            'sepal width': '4.0',
            'petal length': '2.0',
            'petal width': '0.7',
        },
    ]
    fake_request = Mock(
        json=rows,
        args={'threshold': 1.0},
        method='POST',
        mimetype='application/json',
    )
    with flask_app.test_request_context():
        resp = service(model, fake_request)
    expected_sample = np.array(
        [
            [5.2, 3.5, 1.5, 0.2],
            [5.7, 4.0, 2.0, 0.7],
        ],
        dtype='object',
    )
    assert (model.predict.call_args[0][0] == expected_sample).all()
    assert model.predict.call_args[1]['threshold'] == 1.0
    assert resp.status_code == 200
    assert json.loads(resp.get_data(as_text=True)) == {
        "metadata": {
            "status": "OK",
            "error_code": 0,
        },
        "result": [3, 2],
    }
def test_probas(self, PredictService, flask_app):
    """predict_proba=True returns the class probabilities as 'result'."""
    model = Mock()
    model.predict_proba.return_value = np.array([[0.1, 0.5, math.pi]])
    svc = PredictService(mapping=[], predict_proba=True)
    with flask_app.test_request_context():
        resp = svc(model, request)
        payload = json.loads(resp.get_data(as_text=True))
        assert resp.status_code == 200
        assert payload == {
            "metadata": {
                "status": "OK",
                "error_code": 0,
            },
            "result": [0.1, 0.5, math.pi],
        }
def test_generic_error(self, PredictService, flask_app):
    """Unexpected exceptions map to a 500 with a generic error payload."""
    svc = PredictService(mapping=[])
    with patch.object(svc, 'do') as do_mock:
        do_mock.side_effect = KeyError("model")
        with flask_app.test_request_context():
            resp = svc(Mock(), Mock())
            payload = json.loads(resp.get_data(as_text=True))
            assert resp.status_code == 500
            assert payload == {
                "metadata": {
                    "status": "ERROR",
                    "error_code": -1,
                    "error_message": "KeyError: 'model'",
                }
            }
def test_predict_error(self, PredictService, flask_app):
    """A PredictError carries its message and code into the 500 payload."""
    from palladium.interfaces import PredictError
    svc = PredictService(mapping=[])
    with patch.object(svc, 'do') as do_mock:
        do_mock.side_effect = PredictError("mymessage", 123)
        with flask_app.test_request_context():
            resp = svc(Mock(), Mock())
            payload = json.loads(resp.get_data(as_text=True))
            assert resp.status_code == 500
            assert payload == {
                "metadata": {
                    "status": "ERROR",
                    "error_code": 123,
                    "error_message": "mymessage",
                }
            }
def test_probas(self, PredictService, flask_app):
    """Probability output also works when service metadata is configured."""
    model = Mock()
    model.predict_proba.return_value = np.array([[0.1, 0.5, 0.4]])
    svc = PredictService(mapping=[], predict_proba=True)
    with flask_app.test_request_context():
        with patch('palladium.util.get_config') as get_config:
            get_config.return_value = {'service_metadata': {}}
            resp = svc(model, request)
            payload = json.loads(resp.get_data(as_text=True))
            assert resp.status_code == 200
            assert payload == {
                "metadata": {
                    "status": "OK",
                    "error_code": 0,
                },
                "result": [0.1, 0.5, 0.4],
            }
def test_bad_request(self, PredictService, flask_app):
    """A BadRequest raised by do() becomes a 400 error response."""
    svc = PredictService(mapping=[])
    err = BadRequest()
    err.args = ('daniel', )
    with patch.object(svc, 'do') as do_mock:
        do_mock.side_effect = err
        with flask_app.test_request_context():
            resp = svc(Mock(), Mock())
            payload = json.loads(resp.get_data(as_text=True))
            assert resp.status_code == 400
            assert payload == {
                "metadata": {
                    "status": "ERROR",
                    "error_code": -1,
                    "error_message": "BadRequest: ('daniel',)"
                }
            }
def test_functional(self, PredictService, flask_app):
    """End-to-end GET request: field conversion, params, and service metadata."""
    model = Mock()
    model.threshold = 0.3
    model.size = 10
    # hasattr() on a Mock is always True, so the attributes that should
    # have no model-side default must be removed explicitly.
    del model.threshold2
    del model.size2
    model.predict.return_value = np.array(['class1'])
    service = PredictService(
        mapping=[
            ('sepal length', 'float'),
            ('sepal width', 'float'),
            ('petal length', 'float'),
            ('petal width', 'float'),
            ('color', 'str'),
            ('age', 'int'),
            ('active', 'bool'),
            ('austrian', 'bool'),
        ],
        params=[
            ('threshold', 'float'),   # default will be overwritten
            ('size', 'int'),          # not provided, default value kept
            ('threshold2', 'float'),  # will be used, no default value
            ('size2', 'int'),         # not provided, no default value
        ])
    query = {
        'sepal length': '5.2',
        'sepal width': '3.5',
        'petal length': '1.5',
        'petal width': '0.2',
        'color': 'purple',
        'age': '1',
        'active': 'True',
        'austrian': 'False',
        'threshold': '0.7',
        'threshold2': '0.8',
    }
    with flask_app.test_request_context():
        with patch('palladium.util.get_config') as get_config:
            get_config.return_value = {
                'service_metadata': {
                    'service_name': 'iris',
                    'service_version': '0.1'
                }
            }
            resp = service(model, Mock(args=query, method='GET'))
    sample_args, predict_kwargs = model.predict.call_args
    expected_row = np.array(
        [[5.2, 3.5, 1.5, 0.2, 'purple', 1, True, False]],
        dtype='object')
    assert (sample_args[0] == expected_row).all()
    assert predict_kwargs['threshold'] == 0.7
    assert predict_kwargs['size'] == 10
    assert predict_kwargs['threshold2'] == 0.8
    assert 'size2' not in predict_kwargs
    assert resp.status_code == 200
    assert json.loads(resp.get_data(as_text=True)) == {
        "metadata": {
            "status": "OK",
            "error_code": 0,
            "service_name": "iris",
            "service_version": "0.1",
        },
        "result": "class1"
    }