def check_onnx_model(onnx_model, external_converters, external_opset_imports):
    """Validate ``onnx_model``, relaxing failures caused by external ops.

    Runs the ONNX checker. A ``ValidationError`` is re-raised unless
    ``external_converters`` is set, in which case the error may be
    downgraded to a ``UserWarning`` depending on the installed ONNX
    version and whether ``external_opset_imports`` is given.
    """
    try:
        checker.check_model(onnx_model)
        return
    except onnx.checker.ValidationError as e:
        # Without external converters the model is expected to be fully
        # standard, so any validation failure is a real error.
        if external_converters is None:
            raise e

        # ONNX >= 1.5: the default checker skips the schema check when a
        # non-standard domain is set. ONNX-Chainer also accepts external
        # ops without a domain, but shows a warning in that case.
        # ONNX < 1.5: the checker never skips the schema check, so errors
        # are ignored whenever external ops are set.
        if not is_support_non_standard_domain():
            warnings.warn(
                'ValidationError is occurred but ignored because '
                'exporting with `external_converters`. Please take care '
                'about ONNX format check is insufficient. Error '
                'message:\n{}'.format(str(e)), UserWarning)
            return

        # Checker supports non-standard domains: with opset imports set,
        # the error cannot come from the external ops, so re-raise.
        if external_opset_imports:
            raise e
        warnings.warn(
            'ValidationError is occurred but ignored. '
            'ONNX-Chainer recommends to set '
            '`external_opset_imports` when using '
            '`external_converters` on exporting. Please take care '
            'about ONNX format check is insufficient. Error '
            'message:\n{}'.format(str(e)), UserWarning)
def test_export_external_converters_custom_op(tmpdir, domain, version):
    """Export a custom op via an external converter and count warnings."""
    out_dir = str(tmpdir)

    class Dummy(chainer.FunctionNode):
        def forward_cpu(self, inputs):
            self.x = inputs[0]
            return np.ones_like(inputs[0]),

        def backward(self, indexes, grad_outputs):
            return chainer.Variable(np.zeros_like(self.x)),

    def dummy_function(x):
        return Dummy().apply((x,))[0]

    model = chainer.Sequential(dummy_function)
    x = input_generator.increasing(2, 5)

    def custom_converter(params):
        return onnx_helper.make_node(
            'Dummy', params.input_names, params.output_names, domain=domain),

    addon_converters = {'Dummy': custom_converter}

    # Baseline warnings:
    # 1. `external_converter` is experimental feature
    # 2. `return_named_inout` which is used internally is experimental feature
    expected_warning_num = 2
    external_opset_imports = {}
    if domain is None:
        # ValidationError is ignored (external op without a domain)
        expected_warning_num += 1
    else:
        external_opset_imports[domain] = version
        # `external_opset_imports` is experimental feature
        expected_warning_num += 1
        if not onnx_helper.is_support_non_standard_domain():
            # Old ONNX checker rejects the custom domain; the resulting
            # ValidationError is ignored with a warning
            expected_warning_num += 1

    with warnings.catch_warnings(record=True) as w:
        export_testcase(
            model, x, out_dir, external_converters=addon_converters,
            external_opset_imports=external_opset_imports)
    assert len(w) == expected_warning_num

    output_path = os.path.join(out_dir, 'test_data_set_0', 'output_0.pb')
    assert os.path.isfile(output_path)
    output = onnx.numpy_helper.to_array(onnx.load_tensor(output_path))
    expected_output = np.ones_like(x)
    np.testing.assert_allclose(output, expected_output, rtol=1e-5, atol=1e-5)
def test_export_external_converters_custom_op(tmpdir, domain, version):
    """Export a custom op via an external converter; expect a warning
    whenever the domain cannot be validated by the installed ONNX."""
    path = str(tmpdir)

    class Dummy(chainer.FunctionNode):
        def forward_cpu(self, inputs):
            self.x = inputs[0]
            return np.ones_like(inputs[0]),

        def backward(self, indexes, grad_outputs):
            return chainer.Variable(np.zeros_like(self.x)),

    def dummy_function(x):
        return Dummy().apply((x,))[0]

    model = chainer.Sequential(dummy_function)
    x = input_generator.increasing(2, 5)

    def custom_converter(params):
        return onnx_helper.make_node(
            'Dummy', params.input_names, params.output_names, domain=domain),

    addon_converters = {'Dummy': custom_converter}

    external_opset_imports = {}
    is_set_domain = domain is not None
    if is_set_domain:
        external_opset_imports[domain] = version

    export_kwargs = dict(
        external_converters=addon_converters,
        external_opset_imports=external_opset_imports)
    if is_set_domain and onnx_helper.is_support_non_standard_domain():
        # Domain plus a checker that understands it: clean export.
        export_testcase(model, x, path, **export_kwargs)
    else:
        # No domain, or an old checker: validation failure is downgraded
        # to a UserWarning by the exporter.
        with testing.assert_warns(UserWarning):
            export_testcase(model, x, path, **export_kwargs)

    output_path = os.path.join(path, 'test_data_set_0', 'output_0.pb')
    assert os.path.isfile(output_path)
    output = onnx.numpy_helper.to_array(onnx.load_tensor(output_path))
    expected_output = np.ones_like(x)
    np.testing.assert_allclose(output, expected_output, rtol=1e-5, atol=1e-5)
def _export(model, args, filename, export_params, graph_name, save_text,
            opset_version, input_names, output_names, return_named_inout,
            external_converters, external_opset_imports):
    """Trace ``model`` on ``args`` and build/serialize an ONNX model.

    Runs a forward pass to record the graph, converts parameters and
    inputs/outputs to ONNX tensors, builds the ``ModelProto``, validates
    it, and optionally writes it to ``filename``.

    Returns the ``ModelProto``; when ``return_named_inout`` is True,
    returns ``(model, network_inputs, network_outputs)`` instead.

    Raises ``ValueError`` for unsupported ``args``/output types and may
    re-raise ``onnx.checker.ValidationError`` from the model check.
    """
    if opset_version is None:
        opset_version = int(onnx.defs.onnx_opset_version())
    elif opset_version < MINIMUM_OPSET_VERSION:
        warnings.warn(
            'ONNX-Chainer has been tested only with opset_version >= {m}. '
            'This is because ONNXRuntime supports only opset_version >= {m}. '
            'The ONNX file exported with your requested opset_version ({o}) '
            'may cause some problems because the converters used for the '
            'opset_version have not been tested.'.format(
                m=MINIMUM_OPSET_VERSION, o=opset_version))

    # Forward computation: wrap raw arrays in Variables so the trace can
    # name them, and collect the named network inputs.
    context = Context(model)
    network_inputs = OrderedDict()
    if isinstance(args, tuple):
        args = list(args)
    if isinstance(args, list):
        for i, arg in enumerate(args):
            if isinstance(arg, chainer.get_array_types()):
                args[i] = chainer.Variable(arg)
            network_inputs[context.get_name(args[i])] = args[i]
        outputs = model(*args)
    elif isinstance(args, dict):
        for key, arg in args.items():
            if isinstance(arg, chainer.get_array_types()):
                args[key] = chainer.Variable(arg)
            network_inputs[context.get_name(args[key])] = args[key]
        outputs = model(**args)
    elif isinstance(args, chainer.get_array_types()):
        args = chainer.Variable(args)
        network_inputs[context.get_name(args)] = args
        outputs = model(args)
    elif isinstance(args, chainer.Variable):
        network_inputs[context.get_name(args)] = args
        outputs = model(args)
    else:
        raise ValueError(
            'The \'args\' argument should be a list, tuple, dict, '
            'numpy array, or Chainer Variable. But a {} object was '
            'given.'.format(type(args)))
    rename_variable_name(context, args, network_inputs, input_names)

    initializers = []
    input_tensors = []
    param_names = set()
    for org_name, param in model.namedparams():
        # `model.namedparams()` has `include_uninit` flag but not use, to
        # output user warning
        if param.array is None:
            warnings.warn(
                'The parameter \'{}\' is not initialized, skip setting to '
                'ONNX graph'.format(org_name))
            continue
        name = context.get_name(param)
        param_names.add(name)
        tensor = convert_parameter(param, context)
        initializers.append(tensor)
        input_tensors.append(helper.make_tensor_value_info(
            name, tensor.data_type, tensor.dims))

    for name, var in network_inputs.items():
        input_tensors.append(helper.make_tensor_value_info(
            name, NP_TYPE_TO_TENSOR_TYPE[var.dtype], var.shape))

    if external_converters:
        chainer.utils.experimental('external_converters')
        # External converters override built-ins on key collision.
        converters = dict(mapping.converters, **external_converters)
    else:
        converters = mapping.converters

    # Normalize the forward result to a flat list of output Variables.
    if isinstance(outputs, (list, tuple)):
        flat_outputs = outputs
    elif isinstance(outputs, dict):
        flat_outputs = list(outputs.values())
    elif isinstance(outputs, chainer.Variable):
        flat_outputs = [outputs]
    else:
        raise RuntimeError('Unexpected output type from the model: {}'.format(
            type(outputs)))
    if not all([isinstance(o, chainer.Variable) for o in flat_outputs]):
        raise ValueError('The all \'outputs\' must be Chainer Variable')
    network_outputs = OrderedDict(
        [(context.get_name(var), var) for var in flat_outputs])
    if output_names:
        rename_variable_name(context, outputs, network_outputs, output_names)

    o = Graph(context, converters, opset_version, network_outputs)
    o.to_onnx_graph()

    # Graph inputs that are neither parameters nor user inputs were
    # created implicitly by converters; export them as initializers.
    implicit_input_names = set(o.inputs.keys()) - param_names -\
        set(network_inputs.keys())
    for name in implicit_input_names:
        tensor = convert_parameter(o.inputs[name], context)
        initializers.append(tensor)
        input_tensors.append(helper.make_tensor_value_info(
            name, tensor.data_type, tensor.dims))

    # If additional parameters are created during conversion
    for param in context.parameters:
        tensor = convert_parameter(param, context)
        initializers.append(tensor)
        input_tensors.append(helper.make_tensor_value_info(
            context.get_name(param), tensor.data_type, tensor.dims))

    # Convert output tensors
    output_tensors = []
    for name, var in network_outputs.items():
        output_tensors.append(helper.make_tensor_value_info(
            name, NP_TYPE_TO_TENSOR_TYPE[var.dtype], var.shape))

    if not export_params:
        initializers = []

    onnx_graph = helper.make_graph(
        o.graph, graph_name, input_tensors, output_tensors,
        initializer=initializers)

    opset_imports = [helper.make_operatorsetid('', opset_version)]
    if external_opset_imports:
        chainer.utils.experimental('external_opset_imports')
        for domain, version in external_opset_imports.items():
            opset_imports.append(helper.make_operatorsetid(domain, version))
    model = helper.make_model(
        onnx_graph,
        producer_name='Chainer',
        producer_version=chainer.__version__,
        opset_imports=opset_imports)

    model.ir_version = onnx.IR_VERSION

    # Validate; ValidationError handling (warn vs. raise) for external
    # converters is centralized in check_onnx_model.
    check_onnx_model(model, external_converters, external_opset_imports)

    if filename is not None and isinstance(filename, str):
        with open(filename, 'wb') as fp:
            fp.write(model.SerializeToString())
        if save_text:
            with open(filename + '.txt', 'w') as fp:
                print(model, file=fp)
    elif hasattr(filename, 'write'):
        filename.write(model.SerializeToString())

    if return_named_inout:
        chainer.utils.experimental('return_named_inout')
        return model, network_inputs, network_outputs
    return model