Example 1
 def test_constructor_fail(self):
     with self.assertRaisesRegex(ValueError, '`meta` does not follow the proper schema'):
         with mock.patch('porter.services.cf.json_encoder', spec={'encode.side_effect': TypeError}) as mock_encoder:
             prediction_service = PredictionService(
                 model=None, name='foo', api_version='bar', meta=object())
     with self.assertRaisesRegex(ValueError, '.*callable.*'):
         prediction_service = PredictionService(model=None, additional_checks=1)
Example 2
 def test__add_feature_schema_batch(self):
     model = mock.Mock()
     model_name = api_version = mock.MagicMock()
     mock_additional_checks = mock.Mock()
     feature_schema = schemas.Object(properties=dict(
         x=schemas.Integer(),
         y=schemas.Number(),
         z=schemas.String(),
     ))
     with mock.patch('porter.services.BaseService.add_request_schema') as mock_add_request_schema:
         prediction_service = PredictionService(
             model=model,
             name=model_name,
             api_version=api_version,
             meta={},
             preprocessor=None,
             postprocessor=None,
             batch_prediction=True,
             feature_schema=feature_schema,
         )
     args = mock_add_request_schema.call_args_list[0][0]
     self.assertEqual(args[0].upper(), 'POST')
     request_obj = args[1]
     self.assertIsInstance(request_obj, schemas.Array)
     item_obj = request_obj.item_type
     self.assertIn('id', item_obj.properties)
     self.assertIn('x', item_obj.properties)
     self.assertIn('y', item_obj.properties)
     self.assertIn('z', item_obj.properties)
Example 3
    @classmethod
    def setUpClass(cls):
        # DO NOT set app.testing = True here
        # doing so *disables* error handling in the application and instead
        # passes errors on to the test client (in our case, instances of
        # unittest.TestCase).
        # In this class we actually want to test the application's error handling
        # and thus do not set this attribute.
        # See http://flask.pocoo.org/docs/0.12/api/#flask.Flask.test_client

        prediction_service = PredictionService(name='failing-model',
                                               api_version='B',
                                               model=None,
                                               meta={
                                                   '1': 'one',
                                                   'two': 2
                                               })

        cls.model_app = ModelApp([prediction_service])
        flask_app = cls.model_app.app

        @flask_app.route('/test-error-handling/', methods=['POST'])
        def test_error():
            flask.request.get_json(force=True)
            raise Exception('exceptional testing of exceptions')

        cls.app_test_client = flask_app.test_client()
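A companion test method for the same class is sketched below. It is hypothetical, not taken from porter's test suite: it assumes that porter's error handling (the behavior this fixture deliberately leaves enabled) converts the unhandled exception raised by the /test-error-handling/ route into a 500 response with a JSON body, and that json is imported in the test module.

    def test_unhandled_error_returns_json_500(self):
        # Sketch only: assumes the app's error handler turns the unhandled
        # exception raised in /test-error-handling/ into a 500 response whose
        # body is JSON rather than an HTML traceback.
        resp = self.app_test_client.post('/test-error-handling/', data='{}')
        self.assertEqual(resp.status_code, 500)
        json.loads(resp.data)  # should parse as JSON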
Example 4
 def test_request_schema(self):
     model = mock.Mock()
     model_name = api_version = mock.MagicMock()
     mock_additional_checks = mock.Mock()
     feature_schema = schemas.Object(properties=dict(
         x=schemas.Integer(),
         y=schemas.Number(),
         z=schemas.String(),
     ))
     with mock.patch('porter.services.BaseService.add_request_schema') as mock_add_request_schema:
         prediction_service = PredictionService(
             model=model,
             name=model_name,
             api_version=api_version,
             meta={},
             preprocessor=None,
             postprocessor=None,
             batch_prediction=False,
             feature_schema=feature_schema,
         )
     request_schema = prediction_service.request_schema
     request = dict(id=1, x=2, y=3.5, z='4')
     request_schema.validate(request)
     with self.assertRaisesRegex(ValueError, 'data must contain'):
         request = dict(x=2, y=3.5, z='4')
         request_schema.validate(request)
Example 5
 def test__add_prediction_schema_batch(self):
     model = mock.Mock()
     model_name = api_version = mock.MagicMock()
     mock_additional_checks = mock.Mock()
     prediction_schema = schemas.Object(properties=dict(
         prediction=schemas.Number(),
         confidence=schemas.Number(),
     ))
     with mock.patch('porter.services.BaseService.add_response_schema') as mock_add_response_schema:
         prediction_service = PredictionService(
             model=model,
             name=model_name,
             api_version=api_version,
             meta={},
             preprocessor=None,
             postprocessor=None,
             batch_prediction=True,
             prediction_schema=prediction_schema,
         )
     args = mock_add_response_schema.call_args_list[-1][0]
     self.assertEqual(args[0].upper(), 'POST')
     self.assertEqual(args[1], 200)
     response_obj = args[2]
     self.assertIsInstance(response_obj, schemas.Object)
     self.assertIn('request_id', response_obj.properties)
     self.assertIn('model_context', response_obj.properties)
     self.assertIn('predictions', response_obj.properties)
     pred_obj = response_obj.properties['predictions']
     self.assertIsInstance(pred_obj, schemas.Array)
     item_obj = pred_obj.item_type
     self.assertIn('id', item_obj.properties)
     self.assertIn('prediction', item_obj.properties)
     pred_schema = item_obj.properties['prediction']
     self.assertIn('prediction', pred_schema.properties)
     self.assertIn('confidence', pred_schema.properties)
Example 6
 def test_request_schema_response_schema_uninitialized(self):
     model = mock.Mock()
     model_name = 'my-test-model-noschemas'
     api_version = 'v1'
     mock_additional_checks = mock.Mock()
     prediction_schema = schemas.Object(properties=dict(
         prediction=schemas.Number(),
         confidence=schemas.Number(),
     ))
     prediction_service = PredictionService(
         model=model,
         name=model_name,
         api_version=api_version,
         meta={},
         preprocessor=None,
         postprocessor=None,
     )
     # request_schema is None if feature_schema is None
     self.assertIs(prediction_service.request_schema, None)
     # response_schema has a default
     response = dict(
         model_context=dict(
             api_version=api_version,
             model_meta={},
             model_name=model_name),
         predictions=[ dict(id=1, prediction=3.14) ],
         request_id='abcdefg')
     prediction_service.response_schema.validate(response)
Example 7
 def test_serve_no_processing_single(self, mock_request_json):
     # make sure it doesn't break when processors are None
     model = mock.Mock()
     model_name = api_version = mock.MagicMock()
     mock_request_json.return_value = {'id': None}
     model.predict.return_value = [1]
     prediction_service = PredictionService(
         model=model,
         name=model_name,
         api_version=api_version,
         meta={},
         preprocessor=None,
         postprocessor=None,
         batch_prediction=False,
         additional_checks=None
     )
     _ = prediction_service._predict()
Example 8
    def test_get_post_data_instance_prediction(self, mock_request_json):
        mock_model = mock.Mock()
        mock_model.predict.return_value = [1]

        # Succeed
        mock_request_json.return_value = {'id': None}
        prediction_service = PredictionService(
            model=mock_model,
            name=mock.MagicMock(),
            api_version=mock.MagicMock(),
            meta={},
            preprocessor=None,
            postprocessor=None,
            batch_prediction=False,
            additional_checks=None
        )
        _ = prediction_service._predict()
Example 9
    def test_readiness_ready_ready1(self, mock_init):
        mock_init.return_value = None
        svc = PredictionService()
        svc.name = 'model1'
        svc.api_version = '1.0.0'
        svc.id = 'model1'
        svc.endpoint = '/model1/1.0.0/prediction'
        svc.meta = {'foo': 1, 'bar': 2}
        svc.response_schemas = {}
        svc.request_schemas = {}

        model_app = ModelApp([svc])
        app = model_app.app.test_client()

        resp_alive = app.get('/-/alive')
        resp_ready = app.get('/-/ready')
        expected_data = {
            'request_id': '123',
            'porter_version': __version__,
            'deployed_on': cn.HEALTH_CHECK_VALUES.DEPLOYED_ON,
            'app_meta': {
                'description': '<div></div><div><p>(porter v0.15.3)</p></div>',
                'expose_docs': False,
                'name': None,
                'version': None
            },
            'services': {
                'model1': {
                    'status': 'READY',
                    'endpoint': '/model1/1.0.0/prediction',
                    'model_context': {
                        'model_name': 'model1',
                        'api_version': '1.0.0',
                        'model_meta': {
                            'foo': 1,
                            'bar': 2
                        }
                    }
                }
            }
        }
        self.assertEqual(resp_alive.status_code, 200)
        self.assertEqual(resp_ready.status_code, 200)
        alive_response = json.loads(resp_alive.data)
        ready_response = json.loads(resp_ready.data)
        self.assertEqual(alive_response, expected_data)
        self.assertEqual(ready_response, expected_data)
        # make sure the defined schema matches reality
        sc.health_check.validate(alive_response)  # should not raise an exception
        sc.health_check.validate(ready_response)  # should not raise an exception
Example 10
    def test_serve_success_batch(self, mock_responses_api, mock_request_json):
        # TODO rename this or next test
        mock_request_json.return_value = [
            {'id': 1, 'feature1': 10, 'feature2': 0},
            {'id': 2, 'feature1': 11, 'feature2': 1},
            {'id': 3, 'feature1': 12, 'feature2': 2},
            {'id': 4, 'feature1': 13, 'feature2': 3},
            {'id': 5, 'feature1': 14, 'feature2': 3},
        ]
        mock_responses_api.jsonify = lambda payload, status_code: payload
        mock_model = mock.Mock()
        test_model_name = 'model'
        test_api_version = '1.0.0'
        mock_preprocessor = mock.Mock()
        mock_postprocessor = mock.Mock()

        feature_values = {str(x): x for x in range(5)}
        mock_model.predict = lambda X: X['feature1'] + X['feature2'].map(feature_values) + X['feature3']
        def preprocess(X):
            X['feature2'] = X.feature2.astype(str)
            X['feature3'] = range(len(X))
            return X
        mock_preprocessor.process = preprocess
        def postprocess(X_in, X_pre, preds):
            return preds * 2
        mock_postprocessor.process = postprocess
        prediction_service = PredictionService(
            model=mock_model,
            name=test_model_name,
            api_version=test_api_version,
            meta={'1': '2', '3': '4'},
            preprocessor=mock_preprocessor,
            postprocessor=mock_postprocessor,
            batch_prediction=True,
            additional_checks=None
        )
        mock_responses_api.get_model_context.return_value = prediction_service
        actual = prediction_service()
        expected = {
            'request_id': 123,
            'model_context': {
                'model_name': test_model_name,
                'api_version': test_api_version,
                'model_meta': {
                    '1': '2',
                    '3': '4'
                }
            },
            'predictions': [
                {'id': 1, 'prediction': 20},
                {'id': 2, 'prediction': 26},
                {'id': 3, 'prediction': 32},
                {'id': 4, 'prediction': 38},
                {'id': 5, 'prediction': 42},
            ]
        }
        self.assertEqual(actual, expected)
Example 11
    def test_get_post_data_batch_prediction(self, mock_request_json):
        mock_model = mock.Mock()
        mock_model.predict.return_value = []
        mock_name = mock_version = mock.MagicMock()

        # Succeed
        mock_request_json.return_value = [{'id': 1}]
        prediction_service = PredictionService(
            model=mock_model,
            name=mock_name,
            api_version=mock_version,
            meta={},
            preprocessor=None,
            postprocessor=None,
            batch_prediction=True,
            additional_checks=None
        )
        _ = prediction_service._predict()
Example 12
 def test__predict_additional_checks(self, mock_request_json):
     model = mock.Mock()
     model_name = api_version = mock.MagicMock()
     mock_request_json.return_value = {'id': 1}
     model.predict.return_value = [1]
     mock_additional_checks = mock.Mock()
     prediction_service = PredictionService(
         model=model,
         name=model_name,
         api_version=api_version,
         meta={},
         preprocessor=None,
         postprocessor=None,
         batch_prediction=False,
         additional_checks=mock_additional_checks
     )
     _ = prediction_service._predict()
     mock_additional_checks.assert_called()
Example 13
 @classmethod
 def setUpClass(cls):
     service1 = PredictionService(
         name='service1',
         api_version='2',
         model=None,  # we're not going to make calls for predictions here
         feature_schema=sc.Object(properties={
             'a': sc.Integer(),
             'b': sc.Integer(),
             'c': sc.Number()
         }))
     service2 = PredictionService(
         namespace='ns',
         name='service2',
         api_version='1',
         model=None,  # we're not going to make calls for predictions here
         feature_schema=sc.Object(properties={
             'a': sc.Integer(),
             'b': sc.Integer()
         }))
Example 14
 def test__predict_additional_checks_raises_422(self, mock_request_json):
     model = mock.Mock()
     model_name = api_version = mock.MagicMock()
     mock_request_json.return_value = {'id': 1}
     model.predict.return_value = [1]
     mock_additional_checks = mock.Mock()
     mock_additional_checks.side_effect = ValueError('verify user message is passed on')
     prediction_service = PredictionService(
         model=model,
         name=model_name,
         api_version=api_version,
         meta={},
         preprocessor=None,
         postprocessor=None,
         batch_prediction=False,
         additional_checks=mock_additional_checks
     )
     with self.assertRaisesRegex(werkzeug_exc.UnprocessableEntity, '.*verify user message is passed on.*'):
         _ = prediction_service._predict()
     mock_additional_checks.assert_called()
Example 15
    def test_get_post_data_validation(self, mock_request_json):
        # this test also implicitly covers BaseService.get_post_data
        mock_model = mock.Mock()
        mock_model.predict.return_value = []
        mock_name = mock_version = mock.MagicMock()
        feature_schema = schemas.Object(properties=dict(x=schemas.Integer()))
        prediction_service = PredictionService(
            model=mock_model,
            name=mock_name,
            api_version=mock_version,
            meta={},
            preprocessor=None,
            postprocessor=None,
            batch_prediction=True,
            feature_schema=feature_schema,
            additional_checks=None
        )

        # Succeed
        mock_request_json.return_value = [{'id': 1, 'x': 37}]
        prediction_service.get_post_data()

        # Succeed
        mock_request_json.return_value = [{'id': 1, 'x': 3.7}]
        prediction_service.get_post_data()

        # Fail
        prediction_service = PredictionService(
            model=mock_model,
            name=mock_name,
            api_version=mock_version + 1,
            meta={},
            preprocessor=None,
            postprocessor=None,
            batch_prediction=True,
            feature_schema=feature_schema,
            validate_request_data=True,
            additional_checks=None)
        with self.assertRaises(werkzeug_exc.UnprocessableEntity):
            prediction_service.get_post_data()
Example 16
 def test_serve_with_processing_single(self, mock_request_json):
     model = mock.Mock()
     model_name = api_version = mock.MagicMock()
     mock_request_json.return_value = {'id': None}
     model.predict.return_value = [1]
     mock_preprocessor = mock.Mock()
     mock_preprocessor.process.return_value = {}
     mock_postprocessor = mock.Mock()
     mock_postprocessor.process.return_value = [1]
     prediction_service = PredictionService(
         model=model,
         name=model_name,
         api_version=api_version,
         meta={},
         preprocessor=mock_preprocessor,
         postprocessor=mock_postprocessor,
         batch_prediction=False,
         additional_checks=None
     )
     _ = prediction_service._predict()
     mock_preprocessor.process.assert_called()
     mock_postprocessor.process.assert_called()
Example 17
 def test_serve_fail(self, mock_responses_api, mock_services_api, mock__predict):
     mock__predict.side_effect = Exception
     name = 'my-model'
     version = '1.0'
     meta = {}
     with self.assertRaises(werkzeug_exc.InternalServerError) as ctx:
         sp = PredictionService(
             model=mock.Mock(), name=name, api_version=version,
             meta=meta, preprocessor=mock.Mock(), postprocessor=mock.Mock(),
             batch_prediction=mock.Mock(),
             additional_checks=mock.Mock())
         sp()
     # porter.responses.make_error_response counts on these attributes being filled out
     self.assertEqual(ctx.exception.model_name, name)
     self.assertEqual(ctx.exception.api_version, version)
     self.assertEqual(ctx.exception.model_meta, meta)
Example 18
 def test_response_schema(self):
     model = mock.Mock()
     model_name = 'my-test-model'
     api_version = 'v1.2'
     mock_additional_checks = mock.Mock()
     prediction_schema = schemas.Object(properties=dict(
         prediction=schemas.Number(),
         confidence=schemas.Number(),
     ))
     with mock.patch('porter.services.BaseService.add_response_schema') as mock_add_response_schema:
         prediction_service = PredictionService(
             model=model,
             name=model_name,
             api_version=api_version,
             meta={},
             preprocessor=None,
             postprocessor=None,
             batch_prediction=False,
             prediction_schema=prediction_schema,
         )
     response_schema = prediction_service.response_schema
     response = dict(
         model_context=dict(
             api_version=api_version,
             model_meta={},
             model_name=model_name),
         predictions=dict(
             id=1,
             prediction=dict(prediction=3.14, confidence=2.72)),
         request_id='abcdefg')
     response_schema.validate(response)
     response = dict(
         model_context=dict(
             api_version=1,
             model_meta={},
             model_name=model_name),
         predictions=dict(
             id=1,
             prediction=dict(prediction=3.14, confidence=2.72)),
         request_id='abcdefg')
     with self.assertRaisesRegex(ValueError, 'data.model_context.api_version must be string'):
         response_schema.validate(response)
Example 19
 def test_constructor(self):
     prediction_service = PredictionService(
         model=None, name='foo', api_version='bar', meta={'1': '2', '3': 4})
Example 20
    @classmethod
    def setUpClass(cls):
        # define objects for model 1
        class Preprocessor1(BasePreProcessor):
            def process(self, X):
                X = X.copy()  # silence SettingWithCopyWarning
                X['feature2'] = X.feature2.astype(str)
                return X

        class Model1(BaseModel):
            feature2_map = {str(x + 1): x for x in range(5)}

            def predict(self, X):
                return X['feature1'] * X.feature2.map(self.feature2_map)

        class Postprocessor1(BasePostProcessor):
            def process(self, X_input, X_preprocessed, predictions):
                return predictions * -1

        feature_schema1 = sc.Object(properties={
            'feature1': sc.Number(),
            'feature2': sc.Number(),
        })

        # define objects for model 2
        class Preprocessor2(BasePreProcessor):
            def process(self, X):
                X['feature3'] = range(len(X))
                return X

        class Model2(BaseModel):
            def predict(self, X):
                return X['feature1'] + X['feature3']

        feature_schema2 = sc.Object(properties={'feature1': sc.Number()})

        def user_check(X):
            if (X.feature1 == 0).any():
                raise exc.UnprocessableEntity

        # define objects for model 3
        class Model3(BaseModel):
            def predict(self, X):
                return X['feature1'] * -1

        feature_schema3 = sc.Object(properties={'feature1': sc.Number()})
        wrong_prediction_schema3 = sc.Number(additional_params=dict(minimum=0))

        cls.prediction_service_error = E = Exception(
            'this mock service failed during prediction')

        class ModelFailing(BaseModel):
            def predict(self, X):
                raise E

        # define configs and add services to app
        prediction_service1 = PredictionService(model=Model1(),
                                                name='a-model',
                                                api_version='v0',
                                                action='predict',
                                                preprocessor=Preprocessor1(),
                                                postprocessor=Postprocessor1(),
                                                feature_schema=feature_schema1,
                                                validate_request_data=True,
                                                batch_prediction=True)
        prediction_service2 = PredictionService(model=Model2(),
                                                name='anotherModel',
                                                api_version='v1',
                                                namespace='n/s/',
                                                preprocessor=Preprocessor2(),
                                                postprocessor=None,
                                                feature_schema=feature_schema2,
                                                validate_request_data=True,
                                                batch_prediction=True,
                                                additional_checks=user_check)
        with warnings.catch_warnings():
            warnings.simplefilter('ignore')
            prediction_service3 = PredictionService(
                model=Model3(),
                name='model-3',
                api_version='v0.0-alpha',
                preprocessor=None,
                postprocessor=None,
                feature_schema=feature_schema3,
                validate_request_data=True,
                validate_response_data=True,
                batch_prediction=False,
                meta={
                    'algorithm': 'randomforest',
                    'lasttrained': 1
                })
            prediction_service4 = PredictionService(
                model=Model3(),
                name='model-4',
                api_version='v0.0-alpha',
                preprocessor=None,
                postprocessor=None,
                feature_schema=feature_schema3,
                validate_request_data=True,
                validate_response_data=True,
                batch_prediction=False,
                meta={
                    'algorithm': 'randomforest',
                    'lasttrained': 1
                })
            prediction_service5 = PredictionService(
                model=Model3(),
                name='model-5',
                api_version='v0.0-alpha',
                preprocessor=None,
                postprocessor=None,
                feature_schema=feature_schema3,
                prediction_schema=wrong_prediction_schema3,
                validate_request_data=True,
                validate_response_data=True,
                batch_prediction=False,
                meta={
                    'algorithm': 'randomforest',
                    'lasttrained': 1
                })
        prediction_service_failing = PredictionService(
            model=ModelFailing(),
            name='failing-model',
            api_version='v1',
            action='fail',
        )
        cls.model_app = ModelApp([
            prediction_service1,
            prediction_service2,
            prediction_service3,
            prediction_service4,
            prediction_service5,
            prediction_service_failing,
        ])
        cls.app = cls.model_app.app.test_client()
Example 21
import logging

from porter.datascience import BaseModel
from porter.services import ModelApp, PredictionService
from porter.utils import JSONLogFormatter


class Model(BaseModel):
    def predict(self, X):
        return (X['foo'] % 3) * X['bar']


prediction_svc = PredictionService(model=Model(),
                                   name='my-model',
                                   api_version='v1',
                                   batch_prediction=True,
                                   log_api_calls=True)

app = ModelApp([prediction_svc])

if __name__ == '__main__':
    stream_handler = logging.StreamHandler()
    formatter = JSONLogFormatter('asctime', 'levelname', 'module', 'name',
                                 'message', 'request_id', 'request_data',
                                 'response_data', 'service_class', 'event')
    stream_handler.setFormatter(formatter)
    logger = logging.getLogger('porter')
    logger.setLevel('INFO')
    logger.addHandler(stream_handler)

    app.run()
Example 22
prediction_service = PredictionService(
    # The value of model.predict() is returned to the client. Required.
    model=model,
    # Name of the model. This determines the route, e.g. send POST requests
    # for this model to host:port/supa-dupa-model/prediction/. Required.
    name='supa-dupa-model',
    # The version of the model. Returned to the client in the prediction
    # response. Required.
    api_version='v1',
    # preprocessor.process() is called on the POST request data before
    # predicting. Optional.
    preprocessor=preprocessor,
    # postprocessor.process() is called on the model's predictions before
    # they are returned to the user. Optional.
    postprocessor=Postprocessor(),
    # The input schema is used to validate the payload of the POST request.
    # Optional.
    feature_schema=feature_schema,
    # Whether to validate the request data.
    validate_request_data=True,
    # Whether the API will accept an array of JSON objects to predict on or a
    # single JSON object only.
    batch_prediction=True
)
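For context, here is a hypothetical client call against the service configured above. It is a sketch, not porter code: it assumes the app is served locally on Flask's default port 5000, uses the /<name>/<api_version>/prediction route pattern that appears in the other examples here (the exact route depends on the porter version), and the feature names in the payload are placeholders.

import json
import urllib.request

# Hypothetical client call; the URL and feature names are assumptions.
# Because batch_prediction=True above, the payload is an array of JSON objects.
payload = [
    {'id': 1, 'feature1': 10.0},
    {'id': 2, 'feature1': 11.5},
]
request = urllib.request.Request(
    'http://localhost:5000/supa-dupa-model/v1/prediction',
    data=json.dumps(payload).encode('utf-8'),
    headers={'Content-Type': 'application/json'},
    method='POST')
with urllib.request.urlopen(request) as response:
    print(json.loads(response.read()))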
Example 23
feature_schema = Object(
    'Inputs to the ratings model',
    properties=dict(
        user_id=Integer('The user ID.'),
        title_id=Integer('The title ID.'),
        is_tv=Boolean('Whether the content is a TV show.'),
        genre=String('The genre.',
                     additional_params={'enum': ['comedy', 'action', 'drama']}),
        average_rating=Number('The title\'s average rating.',
                              additional_params={'minimum': 0, 'maximum': 10}),
    ),
    reference_name='RatingsModelFeatures'
)

# build the prediction service
prediction_service = PredictionService(
    model=my_model,
    name='my-model',
    api_version='v1',
    feature_schema=feature_schema,
    validate_request_data=True)

app = ModelApp(
    [prediction_service],
    name='Example Model',
    description='Minimal example of a model with input validation and documentation.',
    expose_docs=True)

if __name__ == '__main__':
    app.run()
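To see the feature schema in action, here is a small sketch that reuses request_schema.validate() the way the test examples above do; the payload values are made up. Because batch prediction is porter's default, the request schema expects an array of feature objects.

# Made-up payloads for illustration; request_schema.validate() raises
# ValueError on invalid data, as the test examples above demonstrate.
good = [dict(id=1, user_id=122333, title_id=444455555, is_tv=True,
             genre='comedy', average_rating=7.5)]
prediction_service.request_schema.validate(good)

bad = [dict(good[0], genre='horror')]  # 'horror' is not in the genre enum
try:
    prediction_service.request_schema.validate(bad)
except ValueError as err:
    print(err)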
Example 24
import json
import os
import sys
import threading
import time
import urllib.error
import urllib.request

from porter.services import ModelApp, PredictionService

service1 = PredictionService(model=None, name='a-model', api_version='0.0.0')

service2 = PredictionService(model=None,
                             name='yet-another-model',
                             api_version='1.0.0')

service3 = PredictionService(model=None,
                             name='yet-another-yet-another-model',
                             api_version='1.0.0-alpha',
                             meta={'arbitrary details': 'about the model'})

model_app = ModelApp([service1, service2, service3])


def get(url):
    with urllib.request.urlopen(url) as f:
        return f.read()


def run_app(model_app):
    t = threading.Thread(target=model_app.run, daemon=True)
Example 25
    })
except Exception as err:
    print(err)



"""
Now we can instantiate a PredictionService for our model and simply pass it
the schema. Validation can be enabled by setting `validate_request_data=True`.
"""


batch_prediction_service = PredictionService(
    model=RatingsModel(),
    name='user-ratings',
    api_version='v2',
    namespace='datascience',
    feature_schema=ratings_feature_schema,
    validate_request_data=True)


"""
Because batch prediction is enabled in `porter` APIs by default, the following
is a valid payload to `/datascience/user-ratings/v2/prediction`:

[
    {
        "id": 1,
        "user_id": 122333,
        "title_id": 444455555,
        "genre": "comedy",