def test_output(self):
    for opset_version in range(
            onnx_chainer.MINIMUM_OPSET_VERSION,
            onnx.defs.onnx_opset_version() + 1):
        with self.assertRaises(RuntimeError):
            onnx_chainer.export(
                self.model, self.x, opset_version=opset_version)
Example #2
def export_onnx(input_image_path, output_path, gpu, only_output=True):
    """Export ResNet50 model to ONNX graph

    'model.onnx' file will be exported under ``output_path``.
    """
    model = C.ResNet50(pretrained_model='imagenet', arch='fb')

    input_image = read_image(input_image_path)
    input_image = scale(input_image, 256)
    input_image = center_crop(input_image, (224, 224))
    input_image -= model.mean
    input_image = input_image[None, :]

    if gpu >= 0:
        model.to_gpu()
        input_image = chainer.cuda.to_gpu(input_image)

    if only_output:
        os.makedirs(output_path, exist_ok=True)
        name = os.path.join(output_path, 'model.onnx')
        export(model, input_image, filename=name)
    else:
        # the input and the output computed by Chainer are also emitted,
        # for use as a test dataset
        export_testcase(model, input_image, output_path)
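
For reference, a minimal driver for the helper above; the image path and output directory are hypothetical placeholders, and a negative GPU id keeps everything on the CPU:

if __name__ == '__main__':
    # Hypothetical paths; a negative gpu id skips the to_gpu branch above.
    export_onnx('sample.jpg', 'out/resnet50', gpu=-1, only_output=True)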
Example #3
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--model',
                        type=str,
                        required=True,
                        help='Model directory path.')
    parser.add_argument('--out',
                        type=str,
                        required=True,
                        help='ONNX file output path.')
    parser.add_argument('--gpu', type=int, default=0, help='GPU id.')
    args = parser.parse_args()

    # Predefined parameters.
    n_actions = 4  # env.action_space.n
    replay_start_size = 5 * 10**4

    # Load the model.
    q_func = links.Sequence(links.NatureDQNHead(), L.Linear(512, n_actions),
                            DiscreteActionValue)
    opt = chainer.optimizers.RMSpropGraves(lr=2.5e-4,
                                           alpha=0.95,
                                           momentum=0.0,
                                           eps=1e-2)
    opt.setup(q_func)
    rbuf = replay_buffer.ReplayBuffer(10**6)
    explorer = explorers.LinearDecayEpsilonGreedy(
        start_epsilon=1.0,
        end_epsilon=0.1,
        decay_steps=10**6,
        random_action_func=lambda: np.random.randint(n_actions))

    def phi(x):
        # Feature extractor
        return np.asarray(x, dtype=np.float32) / 255

    Agent = agents.DQN
    agent = Agent(q_func,
                  opt,
                  rbuf,
                  gpu=args.gpu,
                  gamma=0.99,
                  explorer=explorer,
                  replay_start_size=replay_start_size,
                  target_update_interval=10**4,
                  clip_delta=True,
                  update_interval=4,
                  batch_accumulator='sum',
                  phi=phi)
    agent.load(args.model)

    # Extract the core links from the model and export them in ONNX format.
    onnx_compat_model = convert_to_compatible_model(agent)
    x = cp.array(np.zeros((1, 4, 84, 84), dtype=np.float32))
    onnx_chainer.export(onnx_compat_model,
                        x,
                        input_names='input',
                        output_names='action',
                        return_named_inout=True,
                        filename=args.out)
Example #4
def export_onnx(input_image_path, output_path, gpu, only_output=True):
    """Export YOLOv2 Tiny model to ONNX graph

    'model.onnx' file will be exported under ``output_path``.
    """
    model = YOLOv2Tiny(pretrained_model='voc0712')

    input_image = read_image(input_image_path)
    input_image = input_image[None, :]

    if gpu >= 0:
        model.to_gpu()
        input_image = chainer.cuda.to_gpu(input_image)

    if only_output:
        os.makedirs(output_path, exist_ok=True)
        name = os.path.join(output_path, 'model.onnx')
        export(
            model, input_image, filename=name,
            output_names=('locs', 'objs', 'confs'))
    else:
        # the input and the output computed by Chainer are also emitted,
        # for use as a test dataset
        export_testcase(
            model, input_image, output_path,
            output_names=('locs', 'objs', 'confs'))
Example #5
def test_get_item_error(slices):
    model = chainer.Sequential(
        lambda x: F.get_item(x, slices=slices))
    x = input_generator.increasing(2, 3, 4)

    with pytest.raises(ValueError):
        export(model, x)
Example #6
def create_onnx_test(graph_name, model, inputs, builtins, out_dir):
    # TODO(hamaji): Investigate why we need to set train=False for ResNet50.
    chainer.config.train = False
    makedirs(out_dir)
    with replace_id(model, builtins):
        onnx_chainer.export(model,
                            inputs,
                            filename='%s/model.onnx' % out_dir,
                            graph_name=graph_name)

    onnx_extra_inputs = []
    if hasattr(model, 'extra_inputs'):
        onnx_extra_inputs = model.extra_inputs

    test_data_dir = '%s/test_data_set_0' % out_dir
    makedirs(test_data_dir)
    for i, var in enumerate(list(inputs) + list(onnx_extra_inputs)):
        with open(os.path.join(test_data_dir, 'input_%d.pb' % i), 'wb') as f:
            t = numpy_helper.from_array(var.data, 'Input_%d' % i)
            f.write(t.SerializeToString())

    chainer.config.train = True
    model.cleargrads()
    result = model(*inputs)
    result.grad = np.ones(result.shape, result.dtype)
    result.backward()

    outputs = [('', result.array)]
    for name, param in model.namedparams():
        outputs.append(('grad_out@' + name, param.grad))
    for i, (name, value) in enumerate(outputs):
        with open(os.path.join(test_data_dir, 'output_%d.pb' % i), 'wb') as f:
            t = numpy_helper.from_array(value, name)
            f.write(t.SerializeToString())
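
The .pb files written above are serialized TensorProto messages; as a sketch (assuming only that onnx is installed), one can be read back into a NumPy array like this:

import onnx
from onnx import numpy_helper

def load_tensor_pb(path):
    # Parse a serialized TensorProto and convert it to a NumPy array.
    tensor = onnx.TensorProto()
    with open(path, 'rb') as f:
        tensor.ParseFromString(f.read())
    return numpy_helper.to_array(tensor)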
Example #7
    def test_compatibility(self):
        chainer.config.train = False
        with chainer.using_config('train', False):
            chainer_out = self.model(self.x).array

        onnx_chainer.export(self.model,
                            self.x,
                            self.fn,
                            opset_version=self.opset_version)

        model_onnx = onnx.load(self.fn)
        sym, params = nnvm.frontend.from_onnx(model_onnx)

        target = 'llvm'
        input_name = sym.list_input_names()[0]

        shape_dict = {input_name: self.x.shape}
        graph, lib, params = nnvm.compiler.build(sym,
                                                 target,
                                                 shape_dict,
                                                 params=params,
                                                 dtype={input_name: 'float32'})
        module = tvm.contrib.graph_runtime.create(graph, lib, tvm.cpu(0))
        module.set_input(input_name, tvm.nd.array(self.x))
        module.set_input(**params)
        module.run()

        out_shape = (1, 1000)
        output = tvm.nd.empty(out_shape, ctx=tvm.cpu(0))
        nnvm_output = module.get_output(0, output).asnumpy()

        np.testing.assert_almost_equal(chainer_out, nnvm_output, decimal=5)

        os.remove(self.fn)
Example #8
def main(logger):
    parser = argparse.ArgumentParser()
    parser.add_argument('--model')
    parser.add_argument('--out', required=False)
    args = parser.parse_args()

    if args.out is None:
        output_filename = args.model + ".onnx"
    else:
        output_filename = args.out

    logger.info("Generating `{}` model and save it to `{}`".format(
        args.model, output_filename))

    try:
        if args.model == 'and_op':
            model = AND()
            x = np.empty((1, 2), dtype=np.float32)
            with chainer.using_config('train', False), \
                    mock.patch('builtins.id', IDGenerator()):
                onnx_chainer.export(model, x, filename=output_filename)

        elif args.model == 'mlp':
            model = MLP()
            x = np.empty((1, 3), dtype=np.float32)
            with chainer.using_config('train', False), \
                    mock.patch('builtins.id', IDGenerator()):
                onnx_chainer.export(model, x, filename=output_filename)

    except Exception:
        logger.exception("An error occurred during generation of the model")
Example #9
def check_output(model, x):
    with tempfile.NamedTemporaryFile('wb') as fp:
        onnx_chainer.export(model, x, fp)

        sym, params = onnx_mxnet.import_model(fp.name)

        mod = mx.mod.Module(symbol=sym,
                            data_names=['input_0'],
                            context=mx.cpu(),
                            label_names=None)
        mod.bind(for_training=False,
                 data_shapes=[('input_0', x.shape)],
                 label_shapes=None)
        mod.set_params(arg_params=params, aux_params=None, allow_missing=True)

        Batch = namedtuple('Batch', ['data'])
        mod.forward(Batch([mx.nd.array(x)]))

        mxnet_out = mod.get_outputs()[0].asnumpy()

        y = model(x)
        if isinstance(y, dict):
            y = y['prob']
        chainer_out = y.array

        np.testing.assert_almost_equal(chainer_out, mxnet_out, decimal=5)
Example #10
def main():
    chainer.config.train = False

    model = FastStyleNet()
    serializers.load_npz(
        './chainer-fast-neuralstyle-models/models/starrynight.model', model)

    input_path = './test1.jpg'
    original = Image.open(input_path).convert('RGB')
    print(original.size)
    image = np.asarray(original, dtype=np.float32).transpose(2, 0, 1)
    image = image.reshape((1, ) + image.shape)
    padding = 0  # set to e.g. 50 to pad the input symmetrically
    if padding > 0:
        image = np.pad(
            image, [[0, 0], [0, 0], [padding, padding], [padding, padding]],
            'symmetric')
    x = image

    out = model(x)
    out = out.data[0]
    print(out.shape)
    print('model done.')

    postprocess(out)

    print('export onnx...')
    onnx_chainer.export(model, x, filename='FastStyleNet.onnx')
Example #11
def test_compatibility(self):
    if MXNET_OPSET_VERSION[self.name] is not None:
        for mxnet_opset_version in MXNET_OPSET_VERSION[self.name]:
            test_mxnet.check_compatibility(
                self.model, self.x, self.fn,
                opset_version=mxnet_opset_version)
    for opset_version in range(1, onnx.defs.onnx_opset_version() + 1):
        onnx_chainer.export(self.model, self.x,
                            opset_version=opset_version)
Example #12
def test_compatibility(self):
    test_mxnet.check_compatibility(self.model,
                                   self.x,
                                   self.fn,
                                   opset_version=self.opset_version)
    onnx_chainer.export(self.model,
                        self.x,
                        opset_version=self.opset_version)
Example #13
def test_compatibility(self):
    test_mxnet.check_compatibility(self.model,
                                   self.x,
                                   self.fn,
                                   opset_version=self.opset_version)
    for opset_version in range(1, onnx.defs.onnx_opset_version() + 1):
        onnx_chainer.export(self.model,
                            self.x,
                            opset_version=opset_version)
Example #14
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('out')
    args = parser.parse_args()

    model = MLP()
    x = np.empty((1, 3), dtype=np.float32)
    with chainer.using_config('train', False), \
            mock.patch('builtins.id', IDGenerator()):
        onnx_chainer.export(model, x, filename=args.out)
Example #15
def convert_model_to_onnx(input_shape, onnx_file_path):
    # Export Chainer model to ONNX
    model = L.VGG16(pretrained_model='imagenet')

    # Pseudo input
    x = np.zeros(input_shape, dtype=np.float32)

    # Don't forget to set train flag off!
    chainer.config.train = False

    onnx_chainer.export(model, x, filename=onnx_file_path)
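
A typical call, using the standard 224x224 VGG16 input shape; the output file name is an assumption:

convert_model_to_onnx((1, 3, 224, 224), 'vgg16.onnx')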
Example #16
def main():
    x = numpy.zeros((1, 3, 32, 32), dtype=numpy.float32)

    # disable rename_tensors version
    export(Model(use_bn=True), x, filename='output/A.onnx')
    export(Model(use_bn=False), x, filename='output/B.onnx')

    # disable the model check inside onnx_chainer.export
    checker.check_model = lambda x: None

    onnx_chainer.export(Model(use_bn=True), x, filename='output/C.onnx')
    onnx_chainer.export(Model(use_bn=False), x, filename='output/D.onnx')
Example #17
def predict(args):
    classes = np.genfromtxt(os.path.join(args.dataset, "meta", "classes.txt"),
                            str,
                            delimiter="\n")
    model, preprocess, xp, test_dataset = prepare_setting(args)

    from chainer.exporters import caffe
    import onnx_chainer
    # x = [chainer.Variable(np.zeros((1, 3, 224, 224), np.float32))]
    # caffe.export(model, x, None, True, 'test')
    x = np.zeros((1, 3, 224, 224), dtype=np.float32)
    onnx_chainer.export(model, x, filename='text.onnx')

    top_1_counter = 0
    top_5_counter = 0
    top_10_counter = 0
    indices = list(range(len(test_dataset)))
    num_iteration = len(indices) if args.sample < 0 else args.sample
    random.shuffle(indices)
    with chainer.function.no_backprop_mode(), chainer.using_config(
            'train', False):
        for i in indices[:num_iteration]:
            img, label = test_dataset.get_example(i)
            h = model.predictor(xp.expand_dims(xp.array(img), axis=0))
            print(xp.expand_dims(xp.array(img), axis=0).shape)
            prediction = chainer.functions.softmax(h)
            if args.device >= 0:
                prediction = xp.asnumpy(prediction[0].data)
            else:
                prediction = prediction[0].data
            top_ten = np.argsort(-prediction)[:10]
            top_five = top_ten[:5]
            if top_five[0] == label:
                top_1_counter += 1
                top_5_counter += 1
                top_10_counter += 1
                msg = "Bingo!"
            elif label in top_five:
                top_5_counter += 1
                top_10_counter += 1
                msg = "matched top 5"
            elif label in top_ten:
                top_10_counter += 1
                msg = "matched top 10"
            else:
                msg = "Boo, actual {}".format(classes[label])
            print(classes[top_five], prediction[top_five], msg)
        print('top1 accuracy', top_1_counter / num_iteration)
        print('top5 accuracy', top_5_counter / num_iteration)
        print('top10 accuracy', top_10_counter / num_iteration)
Example #18
def save_as_onnx_then_import_from_nnvm(model, fn):
    # Prepare an input tensor
    x = np.random.rand(1, 3, 224, 224).astype(np.float32) * 255

    # Run the model on the data
    with chainer.using_config('train', False):
        chainer_out = model(x).array

    # Export Chainer model into ONNX
    onnx_chainer.export(model, x, fn)

    # Load the saved ONNX file using ONNX module
    model_onnx = onnx.load(fn)

    # Convert the ONNX model object into NNVM symbol
    sym, params = nnvm.frontend.from_onnx(model_onnx)

    # Choose the compilation target
    target = 'llvm'

    # Extract the name of the input variable in the ONNX graph
    input_name = sym.list_input_names()[0]
    shape_dict = {input_name: x.shape}

    # Compile the model using NNVM
    graph, lib, params = nnvm.compiler.build(sym,
                                             target,
                                             shape_dict,
                                             params=params,
                                             dtype={input_name: 'float32'})

    # Convert the compiled model into TVM module
    module = tvm.contrib.graph_runtime.create(graph, lib, tvm.cpu(0))

    # Set the input tensor x
    module.set_input(input_name, tvm.nd.array(x))
    module.set_input(**params)

    # Run the model
    module.run()

    # Retrieve the inference result
    out_shape = (1, 1000)
    output = tvm.nd.empty(out_shape, ctx=tvm.cpu(0))
    nnvm_output = module.get_output(0, output).asnumpy()

    # Check that both outputs have the same values
    np.testing.assert_almost_equal(chainer_out, nnvm_output, decimal=5)
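
A sketch of driving this helper end to end; the model choice and file name here are assumptions (a 1000-class ImageNet model matches the (1, 1000) out_shape above):

import chainercv.links as C

model = C.ResNet50(pretrained_model='imagenet', arch='he')
save_as_onnx_then_import_from_nnvm(model, 'resnet50.onnx')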
Example #19
    def gen_onnx_model(self, inputs):
        import onnx_chainer

        self.onnx_dir = self.model_dir
        data_dir = os.path.join(self.onnx_dir, 'test_data_set_0')
        utils.makedirs(data_dir)

        onnx_filename = os.path.join(self.onnx_dir, 'model.onnx')
        if self.is_up_to_date(onnx_filename):
            return False

        onnx_chainer.export(self.model,
                            list(inputs),
                            filename=onnx_filename + '.tmp',
                            graph_name=self.name)
        return True
Example #20
def export_onnx(args):
    config = load_config(args)
    model = MyModel(config)
    chainer.serializers.load_npz(os.path.join(args.model, 'bestmodel.npz'),
                                 model)
    w, h = parse_size(config.get('model_param', 'insize'))
    x = np.zeros((1, 3, h, w), dtype=np.float32)
    logger.info('begin export')
    output = os.path.join(args.model, 'bestmodel.onnx')
    with chainer.using_config('train', False):
        onnx_chainer.export(model, x, filename=output)
    logger.info('end export')
    logger.info('run onnx.check')
    onnx_model = onnx.load(output)
    onnx.checker.check_model(onnx_model)
    logger.info('done')
Example #21
def test_invalid_customized_input_shape(x_shape, shape_option):
    model = chainer.Sequential(F.relu)

    if isinstance(x_shape, tuple):
        xs = np.zeros(x_shape, dtype=np.float32)
    elif isinstance(x_shape, list):
        xs = tuple(np.zeros(shape, dtype=np.float32) for shape in x_shape)
    else:
        assert isinstance(x_shape, dict)
        xs = {
            k: np.zeros(shape, dtype=np.float32)
            for k, shape in x_shape.items()
        }

    with pytest.raises(ValueError):
        export(model, xs, input_shapes=shape_option)
Example #22
def save_as_onnx_then_import_from_mxnet(model, fn):
    # Prepare an input tensor
    x = np.random.rand(1, 3, 224, 224).astype(np.float32) * 255

    # Run the model on the data
    with chainer.using_config('train', False):
        chainer_out = model(x).array

    # Export Chainer model into ONNX
    onnx_chainer.export(model, x, fn)

    # Load ONNX model into MXNet symbol
    sym, arg, aux = mxnet.contrib.onnx.import_model(fn)

    # Find the names of the input tensors
    data_names = [
        graph_input for graph_input in sym.list_inputs()
        if graph_input not in arg and graph_input not in aux
    ]
    data_shapes = [(data_names[0], x.shape)]

    # Create MXNet model
    mod = mxnet.mod.Module(symbol=sym,
                           data_names=data_names,
                           context=mxnet.cpu(),
                           label_names=None)
    mod.bind(for_training=False, data_shapes=data_shapes, label_shapes=None)
    mod.set_params(arg_params=arg,
                   aux_params=aux,
                   allow_missing=True,
                   allow_extra=True)

    # Create input data
    Batch = collections.namedtuple('Batch', ['data'])
    input_data = Batch([mxnet.nd.array(x)])

    # Forward computation using MXNet
    mod.forward(input_data)

    # Retrieve the output of the forward computation
    mxnet_out = mod.get_outputs()[0].asnumpy()

    # Check that the predicted classes are the same
    assert np.argmax(chainer_out) == np.argmax(mxnet_out)

    # Check that both outputs have the same values
    np.testing.assert_almost_equal(chainer_out, mxnet_out, decimal=5)
Example #23
def test_fake_as_funcnode_without_replace():

    class Model(chainer.Chain):
        def __init__(self):
            super().__init__()

        def add(self, xs, value=0.01):
            return xs.array + value

        def __call__(self, xs):
            return F.sigmoid(self.add(xs))

    model = Model()
    x = input_generator.increasing(3, 4)

    with pytest.raises(onnx.checker.ValidationError):
        export(model, x)
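
For contrast, a hedged sketch of making the same raw-array method exportable; it assumes onnx_chainer.replace_func.as_funcnode is available, which wraps a plain function so the exporter records it as a single graph node:

from onnx_chainer.replace_func import as_funcnode

model = Model()
# Wrap the bound method: the exporter then emits one 'Add' node for it
# instead of failing to trace the raw ndarray arithmetic.
model.add = as_funcnode('Add')(model.add)
export(model, x)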
Example #24
def check_output(model, x):
    with tempfile.NamedTemporaryFile('wb') as fp:
        onnx_chainer.export(model, x, fp)
        with open(fp.name, 'rb') as f:
            onnx_model = onnx.ModelProto.FromString(f.read())

        init_net, predict_net = Caffe2Backend.onnx_graph_to_caffe2_net(
            onnx_model.graph, device='CPU')

        benchmark_caffe2_model(init_net, predict_net)

        y = model(x)
        if isinstance(y, dict):
            y = y['prob']
        chainer_out = y.array
        caffe2_out = run_model(onnx_model, [x])[0]

        np.testing.assert_almost_equal(chainer_out, caffe2_out, decimal=5)
Example #25
def check_output(model, x, fn, out_key='prob', opset_version=None):
    if opset_version is None:
        opset_version = onnx.defs.onnx_opset_version()
    if not ONNXRUNTIME_AVAILABLE:
        raise ImportError('check_output requires onnxruntime.')

    chainer.config.train = False

    # Forward computation
    if isinstance(x, (list, tuple)):
        for i in x:
            assert isinstance(i, (np.ndarray, chainer.Variable))
        chainer_out = model(*x)
        x = tuple(
            _x.array if isinstance(_x, chainer.Variable) else _x for _x in x)
    elif isinstance(x, np.ndarray):
        chainer_out = model(chainer.Variable(x))
        x = (x,)
    elif isinstance(x, chainer.Variable):
        chainer_out = model(x)
        x = (x.array,)
    else:
        raise ValueError(
            'The \'x\' argument should be a list or tuple of numpy.ndarray or '
            'chainer.Variable, or simply a numpy.ndarray or a chainer.Variable'
            ' itself. But a {} object was given.'.format(type(x)))

    if isinstance(chainer_out, (list, tuple)):
        chainer_out = (y.array for y in chainer_out)
    elif isinstance(chainer_out, dict):
        chainer_out = chainer_out[out_key]
        if isinstance(chainer_out, chainer.Variable):
            chainer_out = (chainer_out.array,)
    elif isinstance(chainer_out, chainer.Variable):
        chainer_out = (chainer_out.array,)
    else:
        raise ValueError('Unknown output type: {}'.format(type(chainer_out)))

    onnx_model = onnx_chainer.export(model, x, fn, opset_version=opset_version)
    sess = rt.InferenceSession(onnx_model.SerializeToString())
    input_names = [i.name for i in sess.get_inputs()]

    # To detect unexpected inputs created by the exporter, check input names.
    # TODO(disktnk): `input_names` obtained from the onnxruntime session
    #                includes only network inputs, not internal inputs such as
    #                weights, so the network inputs have to be collected from
    #                `onnx_model`.
    initialized_graph_input_names = {
        i.name for i in onnx_model.graph.initializer}
    graph_input_names = [i.name for i in onnx_model.graph.input
                         if i.name not in initialized_graph_input_names]
    assert input_names == list(sorted(graph_input_names))

    rt_out = sess.run(
        None, {name: array for name, array in zip(input_names, x)})

    for cy, my in zip(chainer_out, rt_out):
        np.testing.assert_almost_equal(cy, my, decimal=5)
Example #26
    def compile(self, inputs):
        if self.translator == 'ch2o':
            xmodel = ch2o.compile_model(self.mc, inputs)
            f = tempfile.NamedTemporaryFile(delete=False)
            f.write(xmodel.SerializeToString())
            f.close()
            del xmodel
        elif self.translator == 'onnx_chainer':
            import onnx_chainer
            f = tempfile.NamedTemporaryFile(delete=False)
            onnx_chainer.export(self.mc, inputs, filename=f)
            f.close()
        else:
            raise NotImplementedError('Unsupported translator:',
                                      self.translator)

        graph = chainer_compiler_core.load(f.name)
        os.unlink(f.name)

        self.orig_output_names = graph.output_names()

        if self.computation_order is None:
            fwd_graph, bwd_graph = graph.backward_to(graph.input_names() +
                                                     graph.param_names())
        else:
            fwd_graph, bwd_graph = graph.backward_to_with_order(
                self.computation_order)
        if self.dump_onnx:
            sys.stderr.write('=== vvv forward vvv ===\n' + fwd_graph.dump() +
                             '\n=== ^^^ forward ^^^ ===\n')
            sys.stderr.write('=== vvv backward vvv ===\n' + bwd_graph.dump() +
                             '\n=== ^^^ backward ^^^ ===\n')

        assert graph.input_names() == fwd_graph.input_names()
        self.fwd_input_names = fwd_graph.input_names()
        self.fwd_output_names = fwd_graph.output_names()
        self.bwd_input_names = bwd_graph.input_names()
        self.bwd_output_names = bwd_graph.output_names()
        # TODO(hamaji): Revive shape inference.
        self.fwd = fwd_graph.compile(skip_inference=True)
        self.bwd = bwd_graph.compile(skip_inference=True)
        self.param_names = fwd_graph.param_names()

        self.compiled = True
Example #27
def _run_translator(translator, mc, inputs):
    if translator == 'ch2o':
        from chainer_compiler import ch2o
        xmodel = ch2o.compile_model(mc, inputs)
        f = tempfile.NamedTemporaryFile(delete=False)
        f.write(xmodel.SerializeToString())
        f.close()
        del xmodel
    elif translator == 'onnx_chainer':
        import onnx_chainer
        f = tempfile.NamedTemporaryFile(delete=False)
        onnx_chainer.export(mc, inputs, filename=f)
        f.close()
    else:
        raise NotImplementedError('Unsupported translator:', translator)

    graph = _chainer_compiler_core.load(f.name)
    os.unlink(f.name)

    return graph
Example #28
def main():
    for key, value in srcnn.archs.items():
        model_dir = '../models/{}'.format(key.lower())
        for filename in os.listdir(model_dir):
            basename, ext = os.path.splitext(filename)
            onnx_path = os.path.join(model_dir, basename + '.onnx')
            if ext == '.npz':
                model_path = os.path.join(model_dir, filename)
                print(model_path)
                channels = 3 if 'rgb' in filename else 1
                model = value(channels)
                size = 64 + model.offset
                data = np.zeros((1, channels, size, size), dtype=np.float32)
                x = chainer.Variable(data)
                try:
                    chainer.serializers.load_npz(model_path, model)
                    caffe.export(model, [x], model_dir, True, basename)
                    rename_caffe_model(model_dir, basename)
                except Exception:
                    print('Skipped caffe model export')
                onnx_chainer.export(model, x, filename=onnx_path)
Example #29
    def test_output_type_check(self):
        class Model(chainer.Chain):
            def __init__(self, out_kind):
                super().__init__()
                self.out_kind = out_kind

            def __call__(self, x):
                if self.out_kind == 'array':
                    return x.array
                elif self.out_kind == 'array_in_tuple':
                    return x, x.array
                elif self.out_kind == 'list_in_tuple':
                    return ([x]),
                else:
                    assert self.out_kind == 'var'
                    return x

        model = Model(self.out_kind)
        x = np.ones((1, 3, 4, 5), dtype=np.float32)

        if self.out_kind == 'var':
            export(model, (x, ))  # should be no error
        elif self.out_kind == 'array':
            with self.assertRaises(RuntimeError) as e:
                export(model, (x, ))
            assert 'Unexpected output type' in e.exception.args[0]
        else:
            with self.assertRaises(ValueError) as e:
                export(model, (x, ))
            assert 'must be Chainer Variable' in e.exception.args[0]
Example #30
def export(model, inputs, filename=None, translator='onnx_chainer'):
    if translator == 'ch2o':
        from chainer_compiler import ch2o
        xmodel = ch2o.compile_model(model, inputs)
        if filename is None:
            f = tempfile.NamedTemporaryFile(delete=False)
        else:
            f = open(filename, 'wb')
        f.write(xmodel.SerializeToString())
        f.close()
        del xmodel
    elif translator == 'onnx_chainer':
        import onnx_chainer
        if filename is None:
            f = tempfile.NamedTemporaryFile(delete=False)
        else:
            f = open(filename, 'wb')
        onnx_chainer.export(model, inputs, filename=f)
        f.close()
    else:
        raise NotImplementedError('Unsupported translator:', translator)

    return f.name
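
A hypothetical invocation of this wrapper with a throwaway model, just to show the calling convention; all names below are made up for illustration:

import numpy as np
import chainer
import chainer.links as L

model = chainer.Sequential(L.Linear(3, 2))
x = np.zeros((1, 3), dtype=np.float32)
# The wrapper returns the path it wrote; with filename=None it would
# fall back to a NamedTemporaryFile.
path = export(model, [x], filename='linear.onnx')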
Example #31
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import sys
import numpy as np
import cupy

import chainer
import chainercv.links as C
import onnx_chainer

model = C.ResNet50(pretrained_model='imagenet', arch='he')
# model = C.VGG16(pretrained_model='imagenet')
model.to_gpu()

# Pseudo input
x = chainer.Variable(cupy.zeros((1, 3, 224, 224), dtype=np.float32))

onnx_chainer.export(model, x, filename='resnet50.onnx')
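
A CPU-only variant of the same export, under the assumption that no GPU is available; the pseudo input stays a NumPy array and the to_gpu call is skipped:

model_cpu = C.ResNet50(pretrained_model='imagenet', arch='he')
x_cpu = chainer.Variable(np.zeros((1, 3, 224, 224), dtype=np.float32))
onnx_chainer.export(model_cpu, x_cpu, filename='resnet50_cpu.onnx')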