import json
import numpy as np
from mlflow.pyfunc.scoring_server import NumpyEncoder


def test_get_jsonable_obj():
    from mlflow.pyfunc.scoring_server import _get_jsonable_obj

    # Numpy arrays (string-typed too) should serialize to the same JSON as plain lists.
    py_ary = [["a", "b", "c"], ["e", "f", "g"]]
    np_ary = _get_jsonable_obj(np.array(py_ary))
    assert json.dumps(py_ary, cls=NumpyEncoder) == json.dumps(np_ary, cls=NumpyEncoder)
    np_ary = _get_jsonable_obj(np.array(py_ary, dtype=str))
    assert json.dumps(py_ary, cls=NumpyEncoder) == json.dumps(np_ary, cls=NumpyEncoder)
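The later examples pass `pandas_orient="split"` or `"records"` to `_get_jsonable_obj`. As a minimal sketch of what that argument controls for pandas input (the sample DataFrame and column names below are made up for illustration):

import pandas as pd
from mlflow.pyfunc.scoring_server import _get_jsonable_obj

df = pd.DataFrame({"a": [1, 2], "b": ["x", "y"]})
_get_jsonable_obj(df, pandas_orient="split")
# -> {'index': [0, 1], 'columns': ['a', 'b'], 'data': [[1, 'x'], [2, 'y']]}
_get_jsonable_obj(df, pandas_orient="records")
# -> [{'a': 1, 'b': 'x'}, {'a': 2, 'b': 'y'}]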
Example #2
    def predict(self, deployment_name, df):
        """
        Predict on the specified deployment using the provided dataframe.

        Compute predictions on the pandas DataFrame ``df`` using the specified deployment.
        Note that the input/output types of this method match those of `mlflow pyfunc predict`
        (we accept a pandas DataFrame as input and return either a pandas DataFrame,
        a pandas Series, or a numpy array as output).

        :param deployment_name: Name of deployment to predict against
        :param df: Pandas DataFrame to use for inference
        :return: A pandas DataFrame, pandas Series, or numpy array
        """
        try:
            service = Webservice(self.workspace, deployment_name)
        except Exception as e:
            raise MlflowException(
                'Failure retrieving deployment to predict against') from e

        # Take in DF, parse to json using split orient
        input_data = _get_jsonable_obj(df, pandas_orient='split')

        if not service.scoring_uri:
            raise MlflowException(
                'Error attempting to call webservice, scoring_uri unavailable. '
                'This could be due to a failed deployment, or the service is not ready yet.\n'
                'Current State: {}\n'
                'Errors: {}'.format(service.state, service.error))

        # Pass split orient json to webservice
        # Take records orient json from webservice
        resp = ClientBase._execute_func(service._webservice_session.post,
                                        service.scoring_uri,
                                        data=json.dumps(
                                            {'input_data': input_data}))

        if resp.status_code == 401:
            if service.auth_enabled:
                service_keys = service.get_keys()
                service._session.headers.update(
                    {'Authorization': 'Bearer ' + service_keys[0]})
            elif service.token_auth_enabled:
                service_token, refresh_token_time = service.get_access_token()
                service._refresh_token_time = refresh_token_time
                service._session.headers.update(
                    {'Authorization': 'Bearer ' + service_token})
            # Retry the scoring request with the refreshed auth header, sending
            # the same JSON payload as the initial request
            resp = ClientBase._execute_func(service._webservice_session.post,
                                            service.scoring_uri,
                                            data=json.dumps(
                                                {'input_data': input_data}))

        if resp.status_code == 200:
            # Parse records orient json to df
            return parse_json_input(json.dumps(resp.json()), orient='records')
        else:
            raise MlflowException('Failure during prediction:\n'
                                  'Response Code: {}\n'
                                  'Headers: {}\n'
                                  'Content: {}'.format(resp.status_code,
                                                       resp.headers,
                                                       resp.content))
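A hedged usage sketch for the predict method above, assuming it is exposed through an MLflow deployments client for AzureML; the target URI, deployment name, and feature columns below are placeholders, not values from the example:

import pandas as pd
from mlflow.deployments import get_deploy_client

client = get_deploy_client("azureml://<workspace-scope>")  # placeholder target URI
sample = pd.DataFrame({"feature_1": [0.5], "feature_2": [1.2]})  # made-up schema
preds = client.predict("my-deployment", sample)  # DataFrame, Series, or ndarray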
Example #3
    def invoke(self, data):
        """
        Invoke inference on the input data. The input data must be a pandas DataFrame or a JSON-serializable object.
        """
        # Serialize the input as split-oriented JSON, the format the scoring
        # server expects for this content type.
        content_type = scoring_server.CONTENT_TYPE_JSON
        post_data = json.dumps(
            scoring_server._get_jsonable_obj(data, pandas_orient="split"))

        response = requests.post(
            url=self.url_prefix + "/invocations",
            data=post_data,
            headers={"Content-Type": content_type},
        )

        if response.status_code != 200:
            raise Exception(
                f"Invocation failed (error code {response.status_code}, response: {response.text})"
            )

        # Parse the JSON response back into a pandas DataFrame / numpy array.
        return scoring_server.infer_and_parse_json_input(response.text)
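A usage sketch for invoke; the client class name and constructor arguments below are assumptions (the snippet only shows the method, not the class), and the sample DataFrame is illustrative:

import pandas as pd

client = ScoringServerClient("127.0.0.1", "5000")  # hypothetical class/constructor
preds = client.invoke(pd.DataFrame({"x": [1.0, 2.0], "y": [3.0, 4.0]}))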
Example #4
def predictions_to_json(raw_predictions, output):
    # Convert raw model output (DataFrame, Series, or ndarray) into a
    # records-oriented, JSON-serializable structure and write it to ``output``.
    predictions = _get_jsonable_obj(raw_predictions, pandas_orient="records")
    json.dump(predictions, output, cls=NumpyEncoder)
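A small sketch of using predictions_to_json with an in-memory buffer, assuming the function above is the one importable from mlflow.pyfunc.scoring_server; the sample array is illustrative only:

import io
import numpy as np
from mlflow.pyfunc.scoring_server import predictions_to_json

buf = io.StringIO()
predictions_to_json(np.array([0.1, 0.9]), buf)  # ndarray -> list -> JSON
print(buf.getvalue())  # [0.1, 0.9]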