Example no. 1
0
 def test_onnx_simple_text_plot_toy(self):
     """Check the text rendering of a small hand-built ONNX graph."""
     data = numpy.random.randn(10, 3).astype(numpy.float32)
     # Build abs((X + c) / (X - c)) node by node; creation order matters
     # because it determines the auto-generated intermediate names.
     add_node = OnnxAdd('X', data, op_version=15)
     sub_node = OnnxSub('X', data, op_version=15)
     abs_add = OnnxAbs(add_node, op_version=15)
     abs_sub = OnnxAbs(sub_node, op_version=15)
     div_node = OnnxDiv(abs_add, abs_sub, op_version=15)
     last_node = OnnxAbs(div_node, output_names=['Y'], op_version=15)
     onx = last_node.to_onnx({'X': data.astype(numpy.float32)},
                             outputs={'Y': data},
                             target_opset=15)
     text = onnx_simple_text_plot(onx, verbose=False)
     expected = textwrap.dedent("""
     Add(X, Ad_Addcst) -> Ad_C0
       Abs(Ad_C0) -> Ab_Y0
     Identity(Ad_Addcst) -> Su_Subcst
       Sub(X, Su_Subcst) -> Su_C0
         Abs(Su_C0) -> Ab_Y02
         Div(Ab_Y0, Ab_Y02) -> Di_C0
           Abs(Di_C0) -> Y
     """).strip(" \n")
     self.assertIn(expected, text)
     # Verbose mode prints extra ordering information on stdout only.
     text2, out, err = self.capture(
         lambda: onnx_simple_text_plot(onx, verbose=True))
     self.assertEqual(text, text2)
     self.assertIn('BEST:', out)
     self.assertEmpty(err)
Example no. 2
0
 def test_onnx_simple_text_plot_kmeans_links(self):
     """Rendering with add_links=True draws arrows between node outputs."""
     data = numpy.random.randn(10, 3)
     clusterer = KMeans(3)
     clusterer.fit(data)
     model_onnx = to_onnx(clusterer, data.astype(numpy.float32),
                          target_opset=15)
     rendered = onnx_simple_text_plot(model_onnx, add_links=True)
     for fragment in ("Sqrt(Ad_C0) -> scores  <------", "|-|"):
         self.assertIn(fragment, rendered)
Example no. 3
0
 def test_onnx_simple_text_plot_leaky(self):
     """A single LeakyRelu node renders with its alpha attribute."""
     node = OnnxLeakyRelu('X', alpha=0.5, op_version=15,
                          output_names=['Y'])
     model = node.to_onnx({'X': FloatTensorType()},
                          outputs={'Y': FloatTensorType()},
                          target_opset=15)
     rendered = onnx_simple_text_plot(model)
     # Alpha is formatted with two decimals in the text plot.
     self.assertIn("LeakyRelu(X, alpha=0.50) -> Y", rendered)
Example no. 4
0
 def test_onnx_simple_text_plot_knnr(self):
     """RadiusNeighborsRegressor conversion shows node attributes."""
     data = numpy.random.randn(10, 3)
     target = numpy.random.randn(10)
     regressor = RadiusNeighborsRegressor(3)
     regressor.fit(data, target)
     model_onnx = to_onnx(regressor, data.astype(numpy.float32),
                          target_opset=15)
     rendered = onnx_simple_text_plot(model_onnx, verbose=False)
     # One deeply indented node plus a few attribute renderings.
     fragments = ["              Neg(arange_y0) -> arange_Y0",
                  ", to=7)",
                  ", keepdims=0)",
                  ", perm=[1,0])"]
     for fragment in fragments:
         self.assertIn(fragment, rendered)
Example no. 5
0
    def test_onnx_simple_text_plot_if(self):
        """Text plot of a graph containing an If node with two subgraphs."""
        opv = TARGET_OPSET
        x1 = numpy.array([[0, 3], [7, 0]], dtype=numpy.float32)
        x2 = numpy.array([[1, 0], [2, 0]], dtype=numpy.float32)

        # then-branch: x1 + x2
        then_node = OnnxAdd('x1', 'x2', output_names=['absxythen'],
                            op_version=opv)
        then_body = then_node.to_onnx(
            {'x1': x1, 'x2': x2}, target_opset=opv,
            outputs=[('absxythen', FloatTensorType())])
        # else-branch: x1 - x2
        else_node = OnnxSub('x1', 'x2', output_names=['absxyelse'],
                            op_version=opv)
        else_body = else_node.to_onnx(
            {'x1': x1, 'x2': x2}, target_opset=opv,
            outputs=[('absxyelse', FloatTensorType())])
        # Both subgraphs read x1/x2 from the outer scope, so their own
        # declared inputs must be removed.
        del else_body.graph.input[:]
        del then_body.graph.input[:]

        cond = OnnxGreater(OnnxReduceSum('x1', op_version=opv),
                           OnnxReduceSum('x2', op_version=opv),
                           op_version=opv)
        ifnode = OnnxIf(cond,
                        then_branch=then_body.graph,
                        else_branch=else_body.graph,
                        op_version=opv,
                        output_names=['y'])
        model_def = ifnode.to_onnx(
            {'x1': x1, 'x2': x2}, target_opset=opv,
            outputs=[('y', FloatTensorType())])
        text = onnx_simple_text_plot(model_def)
        self.assertIn("input:", text)
        self.assertIn("If(Gr_C0) -> y", text)
        # to_text(kind="seq") must produce the same rendering.
        oinf = OnnxInference(model_def)
        self.assertEqual(text, oinf.to_text(kind="seq"))
Example no. 6
0
 def test_onnx_simple_text_plot_kmeans(self):
     """KMeans conversion renders the expected sequence of nodes.

     The trailing ``Sqrt``/``ArgMin`` pair may appear in either order
     depending on the topological sort, so both orderings are accepted.
     (The original test declared three expected strings, but the first
     two were byte-identical duplicates; the duplicate was removed.)
     """
     x = numpy.random.randn(10, 3)
     model = KMeans(3)
     model.fit(x)
     onx = to_onnx(model, x.astype(numpy.float32), target_opset=15)
     text = onnx_simple_text_plot(onx)
     expected1 = textwrap.dedent("""
     ReduceSumSquare(X, axes=[1], keepdims=1) -> Re_reduced0
       Mul(Re_reduced0, Mu_Mulcst) -> Mu_C0
         Gemm(X, Ge_Gemmcst, Mu_C0, alpha=-2.00, transB=1) -> Ge_Y0
       Add(Re_reduced0, Ge_Y0) -> Ad_C01
         Add(Ad_Addcst, Ad_C01) -> Ad_C0
           Sqrt(Ad_C0) -> scores
           ArgMin(Ad_C0, axis=1, keepdims=0) -> label
     """).strip(" \n")
     expected2 = textwrap.dedent("""
     ReduceSumSquare(X, axes=[1], keepdims=1) -> Re_reduced0
       Mul(Re_reduced0, Mu_Mulcst) -> Mu_C0
         Gemm(X, Ge_Gemmcst, Mu_C0, alpha=-2.00, transB=1) -> Ge_Y0
       Add(Re_reduced0, Ge_Y0) -> Ad_C01
         Add(Ad_Addcst, Ad_C01) -> Ad_C0
           ArgMin(Ad_C0, axis=1, keepdims=0) -> label
           Sqrt(Ad_C0) -> scores
     """).strip(" \n")
     if expected1 not in text and expected2 not in text:
         raise AssertionError("Unexpected value:\n%s" % text)
Example no. 7
0
######################################
# First implementation: the operator LeakyRelu.


def build_leaky_relu(alpha=0.5, target_opset=15):
    """Return an ONNX model computing ``LeakyRelu(X)`` with slope *alpha*.

    :param alpha: negative-slope coefficient of LeakyRelu
    :param target_opset: opset used both for the node and the model
    :return: the serialized ONNX model (input ``X``, output ``Y``)
    """
    node = OnnxLeakyRelu(
        'X', alpha=alpha, op_version=target_opset, output_names=['Y'])
    return node.to_onnx(
        {'X': FloatTensorType()},
        outputs={'Y': FloatTensorType()},
        target_opset=target_opset)


# Build the default LeakyRelu model and display its text representation.
onx_leaky = build_leaky_relu()
print(onnx_simple_text_plot(onx_leaky))

#####################################
# Second option, the formula introduced above must be adapted as
# ONNX operator Sign returns -1 if *x* is negative and not 0.


def build_leaky_relu_decomposed(alpha=0.5, target_opset=15):
    signo = OnnxSign('X', op_version=target_opset)
    sign = OnnxDiv(OnnxAdd(signo,
                           numpy.array([1], dtype=numpy.float32),
                           op_version=target_opset),
                   numpy.array([2], dtype=numpy.float32),
                   op_version=target_opset)
    fact = OnnxAdd(OnnxMul(sign,
                           numpy.array([1 - alpha], dtype=numpy.float32),
##############################
# Building the model
# ++++++++++++++++++

# Training data: 1000 samples, 10 features; the target is the row sum,
# so a plain linear model fits it exactly.
X = numpy.random.randn(1000, 10).astype(numpy.float32)
y = X.sum(axis=1)

model = LinearRegression()
model.fit(X, y)

#################################
# Conversion to ONNX
# ++++++++++++++++++
# black_op forbids the converter from using the LinearRegressor operator,
# forcing a decomposition into primitive ONNX nodes instead.
onx = to_onnx(model, X, black_op={'LinearRegressor'})
print(onnx_simple_text_plot(onx))

#################################
# Benchmarks
# ++++++++++

# Collected timing observations, one record per measurement.
data = []

###################################
# scikit-learn
print('scikit-learn')

with config_context(assume_finite=True):
    obs = measure_time(lambda: model.predict(X),
                       context=dict(model=model, X=X),
                       repeat=repeat,