# Example #1
 def test_grad_helper_mul(self):
     """Gradient helper on an elementwise product X * X."""
     target = opset
     ident = OnnxIdentity('X', op_version=target)
     product = OnnxMul(ident, ident, op_version=target,
                       output_names=['Y'])
     model = product.to_onnx(
         {'X': FloatTensorType([None, 10])},
         {'Y': FloatTensorType([None, 10])},
         target_opset=target)
     grad_model = onnx_derivative(model)
     self.check_runtime(grad_model, 'test_grad_helper_mul')
# Example #2
 def test_grad_helper_exc(self):
     """onnx_derivative must raise TypeError on a non-DerivativeOptions
     ``options`` argument."""
     target = opset
     add_node = OnnxAdd(
         'X',
         numpy.array([1], dtype=numpy.float32),
         op_version=target,
         output_names=['Y'])
     model = add_node.to_onnx(
         {'X': FloatTensorType([None, 10])},
         {'Y': FloatTensorType([None, 10])},
         target_opset=target)
     # options=1 is not a DerivativeOptions value -> TypeError expected.
     self.assertRaise(
         lambda: onnx_derivative(model, weights=[], options=1),
         TypeError)
# Example #3
 def test_grad_helper_fillgrad(self):
     """FillGrad alone is rejected; combined with KeepOutputs it removes
     the ``Y_grad`` input from the derivative graph."""
     target = opset
     add_node = OnnxAdd(
         'X',
         numpy.array([1], dtype=numpy.float32),
         op_version=target,
         output_names=['Y'])
     model = add_node.to_onnx(
         {'X': FloatTensorType([None, 10])},
         {'Y': FloatTensorType([None, 10])},
         target_opset=target)
     # FillGrad without KeepOutputs is invalid.
     self.assertRaise(
         lambda: onnx_derivative(
             model, weights=[], options=DerivativeOptions.FillGrad),
         ValueError)
     combined = DerivativeOptions.FillGrad | DerivativeOptions.KeepOutputs
     grad_model = onnx_derivative(model, weights=[], options=combined)
     names = {i.name for i in grad_model.graph.input}
     self.assertNotIn('Y_grad', names)
     self.check_runtime(grad_model, 'test_grad_helper_fillgrad',
                        verbose=False)
# Example #4
 def test_grad_helper_noweight(self):
     """Gradient helper with an explicitly empty weight list."""
     target = opset
     add_node = OnnxAdd(
         'X',
         numpy.array([1], dtype=numpy.float32),
         op_version=target,
         output_names=['Y'])
     model = add_node.to_onnx(
         {'X': FloatTensorType([None, 10])},
         {'Y': FloatTensorType([None, 10])},
         target_opset=target)
     grad_model = onnx_derivative(model, weights=[])
     self.check_runtime(grad_model, 'test_grad_helper_noweight')
# Example #5
 def test_grad_helper(self):
     """Default derivative: the forward output Y must not survive as a
     graph output."""
     target = opset
     add_node = OnnxAdd(
         'X',
         numpy.array([1], dtype=numpy.float32),
         op_version=target,
         output_names=['Y'])
     model = add_node.to_onnx(
         {'X': FloatTensorType([None, 10])},
         {'Y': FloatTensorType([None, 10])},
         target_opset=target)
     grad_model = onnx_derivative(model)
     produced = [out.name for out in grad_model.graph.output]
     self.assertNotIn('Y', produced)
     self.check_runtime(grad_model, 'test_grad_helper')
# Example #6
 def test_grad_helper_keep_yield(self):
     """KeepYieldOp must leave YieldOp nodes in the derivative graph.

     The produced model is also dumped to ``verbose_yield.onnx`` for
     manual inspection.
     """
     opv = opset
     node = OnnxAdd('X',
                    numpy.array([1], dtype=numpy.float32),
                    op_version=opv,
                    output_names=['Y'])
     onx = node.to_onnx({'X': FloatTensorType([None, 10])},
                        {'Y': FloatTensorType([None, 10])},
                        target_opset=opv)
     new_onx = onnx_derivative(onx, options=DerivativeOptions.KeepYieldOp)
     types = set(n.op_type for n in new_onx.graph.node)
     self.assertIn('YieldOp', types)
     # Was f"verbose_{'yield'}.onnx" -- an f-string interpolating a
     # string constant; same file name written as a plain literal.
     # NOTE(review): this writes into the current working directory,
     # unlike test_grad_helper_loss which uses get_temp_folder.
     with open("verbose_yield.onnx", "wb") as f:
         f.write(new_onx.SerializeToString())
# Example #7
 def test_grad_helper_loss(self):
     """Derivative of a regression model extended with a loss output.

     Fits a linear regression, appends a squared-error loss with
     ``add_loss_output``, then differentiates the loss-augmented model.
     The derivative graph must differ from the plain loss graph.
     """
     temp = get_temp_folder(__file__, "temp_grad_helper_loss")
     grad_file = os.path.join(temp, "grad.onnx")
     X, y = make_regression(  # pylint: disable=W0632
         100,
         n_features=10,
         bias=2,
         random_state=0)
     X = X.astype(numpy.float32)
     y = y.astype(numpy.float32)
     reg = LinearRegression()
     reg.fit(X, y)
     # Force a 2D coefficient matrix so the converter emits MatMul/Add.
     reg.coef_ = reg.coef_.reshape((1, -1))
     onx = to_onnx(reg, X, target_opset=opset, black_op={'LinearRegressor'})
     onx_loss = add_loss_output(onx)
     text1 = onnx_simple_text_plot(onx_loss)
     # BUG FIX: differentiate the loss-augmented model, not the plain
     # one -- the requested 'loss' output only exists on ``onx_loss``;
     # ``onx`` was built before add_loss_output and has no such output.
     new_onx = onnx_derivative(onx_loss,
                               options=DerivativeOptions.Loss,
                               label='variable',
                               loss='loss',
                               path_name=grad_file)
     text2 = onnx_simple_text_plot(new_onx)
     self.assertNotEqual(text1, text2)