############################################
# Example #1: a unit test for CastRegressor checking that the
# predictions already have the requested dtype before the model
# is converted into ONNX.
def common_test_cast_regressor(self, dtype, input_type):
    # CastRegressor wraps an estimator and casts its predictions to *dtype*.
    model = CastRegressor(DecisionTreeRegressor(max_depth=2), dtype=dtype)
    data = numpy.array(
        [[0.1, 0.2, 3.1], [1, 1, 0], [0, 2, 1], [1, 0, 2], [0.1, 2.1, 1.1],
         [1.1, 0.1, 2.2], [-0.1, -2.1, -1.1], [-1.1, -0.1, -2.2],
         [0.2, 2.2, 1.2], [1.2, 0.2, 2.2]],
        dtype=numpy.float32)
    y = (numpy.sum(data, axis=1, keepdims=0) +
         numpy.random.randn(data.shape[0]))
    model.fit(data, y)
    # The predictions must already be cast to the requested dtype.
    pred = model.predict(data)
    assert pred.dtype == dtype
    model_onnx = convert_sklearn(model,
                                 "cast",
                                 [("input", FloatTensorType([None, 3]))],
                                 target_opset=TARGET_OPSET)
    self.assertTrue(model_onnx is not None)
    # Check the ONNX model returns the same predictions.
    dump_data_and_model(data,
                        model,
                        model_onnx,
                        basename="SklearnCastRegressor{}".format(
                            input_type.__class__.__name__))
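# A hypothetical way to call this helper from a unittest class
# (the calls below are assumptions, not taken from an actual test file):
#
#     self.common_test_cast_regressor(numpy.float32, FloatTensorType([None, 3]))
#     self.common_test_cast_regressor(numpy.float64, DoubleTensorType([None, 3]))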
############################################
# Perfect, no discrepancies at all.

print(diff(skl5, ort5))

##############################################
# CastRegressor
# +++++++++++++
#
# The previous example demonstrated that the type difference for
# the predicted values explains the small differences between
# :epkg:`scikit-learn` and :epkg:`onnxruntime`. However, that issue
# cannot be fixed with the current ONNX graph. Another option is to
# cast the predictions into floats directly in the
# :epkg:`scikit-learn` pipeline, which is what CastRegressor does.
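# The snippet below is only a rough sketch of that idea, not the actual
# implementation of CastRegressor: a hypothetical wrapper (here called
# SimpleCastRegressor) delegates fit to the inner estimator and casts
# the output of predict to float32.

import numpy
from sklearn.base import BaseEstimator, RegressorMixin


class SimpleCastRegressor(RegressorMixin, BaseEstimator):
    """Hypothetical wrapper casting the predictions of another estimator."""

    def __init__(self, estimator, dtype=numpy.float32):
        self.estimator = estimator
        self.dtype = dtype

    def fit(self, X, y):
        # Training is entirely delegated to the wrapped estimator.
        self.estimator.fit(X, y)
        return self

    def predict(self, X):
        # The cast drops the extra double precision the wrapped
        # estimator would otherwise keep.
        return self.estimator.predict(X).astype(self.dtype)
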

ctree = CastRegressor(DecisionTreeRegressor(max_depth=max_depth))
ctree.fit(Xi_train, yi_train)

onx6 = to_onnx(ctree, Xi_train[:1].astype(numpy.float32))

sess6 = InferenceSession(onx6.SerializeToString(),
                         providers=['CPUExecutionProvider'])

skl6 = ctree.predict(X32)
ort6 = sess6.run(None, {'X': X32})[0]

print(diff(skl6, ort6))

##############################
# Success!