def main_impl(args):
    # Set up a neural network to train
    # Classifier reports softmax cross entropy loss and accuracy at every
    # iteration, which will be used by the PrintReport extension below.
    model = MLP(args.unit, 10)
    # classifier = L.Classifier(model)
    classifier = MyClassifier(model, compute_accuracy=args.run_training)
    model = classifier

    if args.gpu >= 0:
        # Make a specified GPU current
        chainer.backends.cuda.get_device_from_id(args.gpu).use()
        model.to_gpu()  # Copy the model to the GPU

    if args.run_training:
        run_training(args, model)
        return

    out_dir = 'out/backprop_test_mnist_mlp'

    x = np.random.random((args.batchsize, 784)).astype(np.float32)
    y = (np.random.random(args.batchsize) * 10).astype(np.int32)
    onehot = np.eye(10, dtype=x.dtype)[y]
    x = chainer.Variable(x, name='input')
    onehot = chainer.Variable(onehot, name='onehot')

    chainer.disable_experimental_feature_warning = True
    shutil.rmtree(out_dir, ignore_errors=True)
    onnx_chainer.export_testcase(model, (x, onehot),
                                 out_dir,
                                 output_grad=True,
                                 output_names='loss')
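
As a quick sanity check, the directory written by export_testcase can be loaded back with the onnx package. The sketch below is not part of the original script; it only assumes the default output layout (model.onnx plus test_data_set_0/input_0.pb, output_0.pb and, with output_grad=True, gradient_*.pb), which matches the paths used by the tests further down this page.

# Minimal sketch: read back the exported test case and inspect it.
import os

import onnx
import onnx.numpy_helper

out_dir = 'out/backprop_test_mnist_mlp'
onnx_model = onnx.load(os.path.join(out_dir, 'model.onnx'))
onnx.checker.check_model(onnx_model)  # structural validation only

data_dir = os.path.join(out_dir, 'test_data_set_0')
input_tensor = onnx.load_tensor(os.path.join(data_dir, 'input_0.pb'))
output_tensor = onnx.load_tensor(os.path.join(data_dir, 'output_0.pb'))
print(input_tensor.name, onnx.numpy_helper.to_array(input_tensor).shape)
print(output_tensor.name, onnx.numpy_helper.to_array(output_tensor).shape)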
Example #2
def export_onnx(input_image_path, output_path, gpu, only_output=True):
    """Export ResNet50 model to ONNX graph

    A 'model.onnx' file will be exported under ``output_path``.
    """
    model = C.ResNet50(pretrained_model='imagenet', arch='fb')

    input_image = read_image(input_image_path)
    input_image = scale(input_image, 256)
    input_image = center_crop(input_image, (224, 224))
    input_image -= model.mean
    input_image = input_image[None, :]

    if gpu >= 0:
        model.to_gpu()
        input_image = chainer.cuda.to_gpu(input_image)

    if only_output:
        os.makedirs(output_path, exist_ok=True)
        name = os.path.join(output_path, 'model.onnx')
        export(model, input_image, filename=name)
    else:
        # the input and output computed by Chainer are also emitted
        # for use as a test dataset
        export_testcase(model, input_image, output_path)
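
For reference, a minimal call of the helper above might look like the following; the image path and output directory are placeholders, not values from the original example.

# Hypothetical usage of export_onnx defined above (paths are placeholders).
export_onnx('images/cat.jpg', 'out/resnet50', gpu=-1, only_output=False)
# only_output=False goes through export_testcase, so out/resnet50 ends up
# with model.onnx plus a test_data_set_0 directory holding the input/output
# tensors computed by Chainer.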
Example #3
def export_onnx(input_image_path, output_path, gpu, only_output=True):
    """Export YOLOv2 Tiny model to ONNX graph

    A 'model.onnx' file will be exported under ``output_path``.
    """
    model = YOLOv2Tiny(pretrained_model='voc0712')

    input_image = read_image(input_image_path)
    input_image = input_image[None, :]

    if gpu >= 0:
        model.to_gpu()
        input_image = chainer.cuda.to_gpu(input_image)

    if only_output:
        os.makedirs(output_path, exist_ok=True)
        name = os.path.join(output_path, 'model.onnx')
        export(
            model, input_image, filename=name,
            output_names=('locs', 'objs', 'confs'))
    else:
        # the input and output computed by Chainer are also emitted
        # for use as a test dataset
        export_testcase(
            model, input_image, output_path,
            output_names=('locs', 'objs', 'confs'))
Example #4
def main_impl(args, model_cls):
    model = model_cls(compute_accuracy=args.run_training)
    insize = model.insize

    replace_id(model)

    if args.gpu >= 0:
        # Make a specified GPU current
        chainer.backends.cuda.get_device_from_id(args.gpu).use()
        model.to_gpu()  # Copy the model to the GPU

    if args.run_training:
        run_training(args, model)
        return

    out_dir = 'out/backprop_test_%s' % args.arch

    xp = model.xp
    x = xp.random.random(
        (args.batchsize, 3, insize, insize)).astype(np.float32)
    y = (xp.random.random(args.batchsize) * 1000).astype(np.int32)
    onehot = xp.eye(1000, dtype=x.dtype)[y]
    x = chainer.Variable(x, name='input')
    y = chainer.Variable(y, name='y')
    onehot = chainer.Variable(onehot, name='onehot')

    chainer.disable_experimental_feature_warning = True
    shutil.rmtree(out_dir, ignore_errors=True)
    onnx_chainer.export_testcase(model, (x, onehot),
                                 out_dir,
                                 output_grad=True,
                                 output_names='loss')
Example #5
def fn(test_name):
    np.random.seed(42)
    x = np.random.rand(1, 3, 224, 224).astype(np.float32)
    test_dir = os.path.join('out', test_name)
    shutil.rmtree(test_dir, ignore_errors=True)
    onnx_chainer.export_testcase(model, [x],
                                 test_dir,
                                 opset_version=9)
Example #6
def gen_test_data_set(model, args, name, opset_version, **kwargs):
    model.xp.random.seed(42)
    test_path = os.path.join(TEST_OUT_DIR, 'opset{}'.format(opset_version),
                             name)
    onnx_chainer.export_testcase(model,
                                 args,
                                 test_path,
                                 opset_version=opset_version,
                                 **kwargs)
    return test_path
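
A minimal sketch of how this helper might be called from a test; the model, input, and TEST_OUT_DIR are placeholders assumed to be provided by the surrounding test module.

# Hypothetical usage of gen_test_data_set (names below are placeholders).
model = chainer.Sequential(chainer.functions.relu)
x = np.ones((1, 5), dtype=np.float32)
test_path = gen_test_data_set(model, (x,), 'relu', opset_version=11)
# test_path now points at TEST_OUT_DIR/opset11/relu, containing model.onnx
# and a test_data_set_0 directory.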
def test_export_external_converters_custom_op(tmpdir, domain, version):
    path = str(tmpdir)

    class Dummy(chainer.FunctionNode):
        def forward_cpu(self, inputs):
            self.x = inputs[0]
            return np.ones_like(inputs[0]),

        def backward(self, indexes, grad_outputs):
            return chainer.Variable(np.zeros_like(self.x)),

    def dummy_function(x):
        return Dummy().apply((x, ))[0]

    model = chainer.Sequential(dummy_function)
    x = input_generator.increasing(2, 5)

    def custom_converter(params):
        return onnx_helper.make_node('Dummy',
                                     params.input_names,
                                     params.output_names,
                                     domain=domain),

    addon_converters = {'Dummy': custom_converter}

    # expected warnings:
    # 1. `external_converter` is an experimental feature
    # 2. `return_named_inout`, which is used internally, is an experimental feature
    expected_warning_num = 2
    external_opset_imports = {}
    if domain is not None:
        external_opset_imports[domain] = version
        # 3. `external_opset_imports` is an experimental feature
        expected_warning_num += 1
        if not onnx_helper.is_support_non_standard_domain():
            # 4. ValidationError is ignored
            expected_warning_num += 1
    else:
        # 3. ValidationError is ignored
        expected_warning_num += 1
    with warnings.catch_warnings(record=True) as w:
        export_testcase(model,
                        x,
                        path,
                        external_converters=addon_converters,
                        external_opset_imports=external_opset_imports)
        assert len(w) == expected_warning_num

    output_path = os.path.join(path, 'test_data_set_0', 'output_0.pb')
    assert os.path.isfile(output_path)
    output = onnx.numpy_helper.to_array(onnx.load_tensor(output_path))
    expected_output = np.ones_like(x)
    np.testing.assert_allclose(output, expected_output, rtol=1e-5, atol=1e-5)
def create_backprop_test(test_name, fn, dtype=np.float32, **kwargs):
    test_dir = 'out/backprop_test_oc_%s' % test_name

    params = {}
    for name, value in kwargs.items():
        params[name] = np.array(value, dtype)
    model = AnyModel(fn, params)

    chainer.disable_experimental_feature_warning = True
    onnx_chainer.export_testcase(model, (),
                                 test_dir,
                                 output_grad=True,
                                 output_names='loss')
def create_test(test_name, get_fun, dtype):
    np.random.seed(314)
    chainer.config.dtype = dtype
    model, inputs = get_fun(dtype)

    output_grad = 'backprop' in test_name
    test_dir = 'out/%s' % test_name

    chainer.disable_experimental_feature_warning = True
    onnx_chainer.export_testcase(model,
                                 inputs,
                                 test_dir,
                                 output_grad=output_grad,
                                 train=True,
                                 output_names='loss')
def test_export_external_converters_custom_op(tmpdir, domain, version):
    path = str(tmpdir)

    class Dummy(chainer.FunctionNode):
        def forward_cpu(self, inputs):
            self.x = inputs[0]
            return np.ones_like(inputs[0]),

        def backward(self, indexes, grad_outputs):
            return chainer.Variable(np.zeros_like(self.x)),

    def dummy_function(x):
        return Dummy().apply((x, ))[0]

    model = chainer.Sequential(dummy_function)
    x = input_generator.increasing(2, 5)

    def custom_converter(params):
        return onnx_helper.make_node('Dummy',
                                     params.input_names,
                                     params.output_names,
                                     domain=domain),

    addon_converters = {'Dummy': custom_converter}

    external_opset_imports = {}
    is_set_domain = domain is not None
    if is_set_domain:
        external_opset_imports[domain] = version
    if is_set_domain and onnx_helper.is_support_non_standard_domain():
        export_testcase(model,
                        x,
                        path,
                        external_converters=addon_converters,
                        external_opset_imports=external_opset_imports)
    else:
        with testing.assert_warns(UserWarning):
            export_testcase(model,
                            x,
                            path,
                            external_converters=addon_converters,
                            external_opset_imports=external_opset_imports)

    output_path = os.path.join(path, 'test_data_set_0', 'output_0.pb')
    assert os.path.isfile(output_path)
    output = onnx.numpy_helper.to_array(onnx.load_tensor(output_path))
    expected_output = np.ones_like(x)
    np.testing.assert_allclose(output, expected_output, rtol=1e-5, atol=1e-5)
Example #11
def test_export_testcase(
        tmpdir, model, x, disable_experimental_warning, in_names, out_names):
    # Just check the existence of pb files
    path = str(tmpdir)
    export_testcase(model, (x,), path,
                    input_names=in_names, output_names=out_names)

    assert os.path.isfile(os.path.join(path, 'model.onnx'))
    input_pb_path = os.path.join(path, 'test_data_set_0', 'input_0.pb')
    assert os.path.isfile(input_pb_path)
    input_tensor = onnx.load_tensor(input_pb_path)
    assert input_tensor.name == (in_names[0] if in_names else 'Input_0')
    output_pb_path = os.path.join(path, 'test_data_set_0', 'output_0.pb')
    assert os.path.isfile(output_pb_path)
    output_tensor = onnx.load_tensor(output_pb_path)
    assert output_tensor.name == (
        out_names[0] if out_names else 'LinearFunction_1')
def test_export_external_converters_overwrite(tmpdir, check_model_expect):
    path = str(tmpdir)

    model = chainer.Sequential(chainer.functions.sigmoid)
    x = input_generator.positive_increasing(2, 5)

    def custom_converter(params):
        return onnx_helper.make_node('Tanh', params.input_names,
                                     params.output_names),

    addon_converters = {'Sigmoid': custom_converter}
    export_testcase(model, x, path, external_converters=addon_converters)

    tanh_outputs = chainer.functions.tanh(x).array
    output_path = os.path.join(path, 'test_data_set_0', 'output_0.pb')
    onnx_helper.write_tensor_pb(output_path, '', tanh_outputs)  # overwrite

    check_model_expect(path)
Example #13
def test_replace_func_collection_return(tmpdir, return_type):
    path = str(tmpdir)

    class Model(chainer.Chain):
        def __init__(self, return_type):
            super().__init__()
            self.return_type = return_type

        def tiled_array(self, xs, n=5):
            if self.return_type == 'list':
                return [xs.array * i for i in range(1, 1+n)]
            else:
                assert self.return_type == 'dict'
                return {str(i): xs.array * i for i in range(1, 1+n)}

        def __call__(self, xs):
            return self.tiled_array(xs)

    model = Model(return_type)
    x = input_generator.increasing(1, 5)

    with warnings.catch_warnings(record=True):
        model.tiled_array = fake_as_funcnode(model.tiled_array, 'xTiledArray')

    def tiled_array_converter(params):
        return onnx_helper.make_node(
            'xTiledArray', params.input_names, params.output_names),

    addon_converters = {'xTiledArray': tiled_array_converter}

    with testing.assert_warns(UserWarning):
        export_testcase(model, x, path, external_converters=addon_converters)

    model_filepath = os.path.join(path, 'model.onnx')
    assert os.path.isfile(model_filepath)

    onnx_model = onnx.load(model_filepath)
    node_names = [n.name for n in onnx_model.graph.node]
    assert len(node_names) == 1
    assert node_names[0] == 'xTiledArray_0'
    output_names = [n.name for n in onnx_model.graph.output]
    assert len(output_names) == 5
    for i, name in enumerate(output_names):
        assert name == 'xTiledArray_0_{:d}'.format(i)
Example #14
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--model', choices=('yolo_v2', 'yolo_v2_tiny', 'yolo_v3'),
        default='yolo_v2')
    parser.add_argument('--gpu', type=int, default=-1)
    parser.add_argument('--pretrained-model', default='voc0712')
    parser.add_argument('--export', action='store_true')
    parser.add_argument('image')
    args = parser.parse_args()

    if args.model == 'yolo_v2':
        model = YOLOv2(
            n_fg_class=len(voc_bbox_label_names),
            pretrained_model=args.pretrained_model)
    elif args.model == 'yolo_v2_tiny':
        model = YOLOv2Tiny(
            n_fg_class=len(voc_bbox_label_names),
            pretrained_model=args.pretrained_model)
    elif args.model == 'yolo_v3':
        model = YOLOv3(
            n_fg_class=len(voc_bbox_label_names),
            pretrained_model=args.pretrained_model)

    if args.gpu >= 0:
        chainer.cuda.get_device_from_id(args.gpu).use()
        model.to_gpu()

    img = utils.read_image(args.image, color=True)

    if args.export:
        import onnx_chainer
        x = model.xp.stack([img])
        onnx_chainer.export_testcase(model, x, args.model)
        return

    bboxes, labels, scores = model.predict([img])
    bbox, label, score = bboxes[0], labels[0], scores[0]

    vis_bbox(
        img, bbox, label, score, label_names=voc_bbox_label_names)
    plt.show()
Example #15
def create_test(test_name, get_fun, dtype):
    np.random.seed(314)
    chainer.config.dtype = dtype
    model, inputs = get_fun(dtype)

    if chainer.cuda.available:
        model.to_gpu()
        inputs = [chainer.cuda.to_gpu(i) for i in inputs]

    output_grad = 'backprop' in test_name
    test_dir = 'out/%s' % test_name

    chainer.disable_experimental_feature_warning = True
    shutil.rmtree(test_dir, ignore_errors=True)
    onnx_chainer.export_testcase(model,
                                 inputs,
                                 test_dir,
                                 output_grad=output_grad,
                                 train=True,
                                 output_names='loss')
Example #16
    def test_output(self, tmpdir):
        # first, write the expected gradients to a temp directory
        expected_result_path = str(tmpdir)

        model = self.get_model()
        x = input_generator.increasing(2, 5)
        export_testcase(model, x, expected_result_path, output_grad=True)

        data_set_name = 'test_data_set_0'
        expected_gradients = [
            os.path.join(expected_result_path, data_set_name,
                         'gradient_{}.pb').format(i) for i in range(2)
        ]
        assert all([os.path.isfile(path) for path in expected_gradients])

        # model.half returns a chainer.Variable and keeps backward enabled
        # regardless of the fake_as_funcnode replacement
        model.half = fake_as_funcnode(model.half, 'MulConstant')
        x = input_generator.increasing(2, 5)

        def gradient_check(model, path):
            actual_gradients = [
                os.path.join(path, data_set_name, 'gradient_{}.pb').format(i)
                for i in range(2)
            ]
            assert all([os.path.isfile(path) for path in actual_gradients])

            def load_tensor(path):
                tensor = onnx.load_tensor(path)
                return onnx.numpy_helper.to_array(tensor)

            for e_path, a_path in zip(expected_gradients, actual_gradients):
                expected = load_tensor(e_path)
                actual = load_tensor(a_path)
                np.testing.assert_allclose(expected, actual)

        self.expect(model,
                    x,
                    output_grad=True,
                    custom_model_test_func=gradient_check)
Example #17
def test_output_grad(tmpdir, model, x, disable_experimental_warning):
    path = str(tmpdir)
    export_testcase(model, (x,), path, output_grad=True, train=True)

    model_filename = os.path.join(path, 'model.onnx')
    assert os.path.isfile(model_filename)
    assert os.path.isfile(os.path.join(path, 'test_data_set_0', 'input_0.pb'))
    assert os.path.isfile(os.path.join(path, 'test_data_set_0', 'output_0.pb'))

    onnx_model = onnx.load(model_filename)
    initializer_names = {i.name for i in onnx_model.graph.initializer}

    # 10 gradient files should be there
    for i in range(10):
        tensor_filename = os.path.join(
            path, 'test_data_set_0', 'gradient_{}.pb'.format(i))
        assert os.path.isfile(tensor_filename)
        tensor = onnx.load_tensor(tensor_filename)
        assert tensor.name.startswith('param_')
        assert tensor.name in initializer_names
    assert not os.path.isfile(
        os.path.join(path, 'test_data_set_0', 'gradient_10.pb'))
Example #18
def test_fake_as_funcnode_keep_structure(tmpdir):
    path = str(tmpdir)

    class Model(chainer.Chain):
        def __init__(self):
            super().__init__()

        def f(self, x):
            return {'a': (x, x+1), 'b': [x+2, x+3, x+4]}

        def __call__(self, x):
            ret = self.f(x)
            return ret['a'][0] + ret['b'][1]

    model = Model()
    x = input_generator.increasing(2, 3)

    with warnings.catch_warnings(record=True):
        model.f = fake_as_funcnode(model.f, 'xF')

    def f_converter(params):
        return onnx_helper.make_node(
            'xF', params.input_names, params.output_names),

    addon_converters = {'xF': f_converter}

    with testing.assert_warns(UserWarning):
        export_testcase(model, x, path, external_converters=addon_converters)

    model_filepath = os.path.join(path, 'model.onnx')
    assert os.path.isfile(model_filepath)

    onnx_model = onnx.load(model_filepath)
    node_names = [n.name for n in onnx_model.graph.node]
    assert len(node_names) == 2
    assert node_names[0] == 'xF_0'
    assert len(onnx_model.graph.node[0].output) == 5
    assert len(onnx_model.graph.output) == 1
Example #19
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--model',
                        choices=('yolo_v2', 'yolo_v2_tiny', 'yolo_v3'),
                        default='yolo_v2')
    parser.add_argument('--gpu', type=int, default=-1)
    parser.add_argument('--pretrained-model', default='voc0712')
    parser.add_argument('--export', action='store_true')
    parser.add_argument('image')
    args = parser.parse_args()

    if args.model == 'yolo_v2':
        model = YOLOv2(n_fg_class=len(voc_bbox_label_names),
                       pretrained_model=args.pretrained_model)
    elif args.model == 'yolo_v2_tiny':
        model = YOLOv2Tiny(n_fg_class=len(voc_bbox_label_names),
                           pretrained_model=args.pretrained_model)
    elif args.model == 'yolo_v3':
        model = YOLOv3(n_fg_class=len(voc_bbox_label_names),
                       pretrained_model=args.pretrained_model)

    if args.gpu >= 0:
        chainer.cuda.get_device_from_id(args.gpu).use()
        model.to_gpu()

    img = utils.read_image(args.image, color=True)

    if args.export:
        import onnx_chainer
        x = model.xp.stack([img])
        onnx_chainer.export_testcase(model, x, args.model)
        return

    bboxes, labels, scores = model.predict([img])
    bbox, label, score = bboxes[0], labels[0], scores[0]

    vis_bbox(img, bbox, label, score, label_names=voc_bbox_label_names)
    plt.show()
Example #20
def gen(name, target_mflops, params):
    iw = params['width']
    k = params['ksize']
    s = params['stride']
    g = params['groups']
    d = params['dilate']

    nc_flops = non_channel_flops(params)
    sqch = target_mflops * 1000 * 1000 / nc_flops

    if 'ichan2' in params:
        oc = decide_channel(sqch / 2, g)
        ic = oc * 2
    elif 'ochan2' in params:
        oc = decide_channel(sqch / 2, g)
        ic = oc * 2
    elif 'dwise' in params:
        ic = round(sqch)
        oc = ic
        g = ic
    elif 'dwise2' in params:
        ic = near_even(sqch / 2)
        oc = ic
        g = ic // 2
    else:
        ic = decide_channel(sqch, g)
        oc = ic

    p = (k // 2) * d

    print(ic, oc, g)
    conv = L.Convolution2D(ic, oc, k, s, p, nobias=True, dilate=d, groups=g)
    x = np.random.rand(1, ic, iw, iw).astype(np.float32)

    name = 'conv_%dmflops_%d_%s' % (target_mflops, iw, name)
    onnx_chainer.export_testcase(conv, x, name)
Example #21
import argparse
import chainer
import numpy as np

import onnx_chainer
from chainercv2.model_provider import get_model as chcv2_get_model

parser = argparse.ArgumentParser(description='Export ChainerCV2 model')
parser.add_argument('name')
parser.add_argument('--gpu', type=int, default=-1)
parser.add_argument('--shape', default='1,3,224,224')
parser.add_argument('--image', type=str)

args = parser.parse_args()

model = chcv2_get_model(args.name, pretrained=True)
if args.gpu >= 0:
    chainer.cuda.get_device_from_id(args.gpu).use()
    model.to_gpu()

shape = tuple(map(int, args.shape.split(',')))
if args.image is None:
    x = model.xp.random.rand(*shape).astype(model.xp.float32)
else:
    from PIL import Image
    img = Image.open(args.image)
    img = img.resize(shape[2:])
    x = model.xp.array(np.asarray(img))
    x = x / 256.0
    x = x.astype(model.xp.float32)
    x = x.reshape(shape)

sizestr = 'x'.join(map(str, shape[2:]))
onnx_chainer.export_testcase(model, x, '%s_%s' % (args.name, sizestr))
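
Assuming the script above is saved as export_chainercv2.py (the filename is a placeholder), it could be run as shown in the comments below; each run writes <name>_<HxW>/model.onnx plus a test_data_set_0 directory via onnx_chainer.export_testcase.

# Hypothetical invocations (script name and argument values are examples only):
#   python export_chainercv2.py resnet50
#   python export_chainercv2.py resnet50 --shape 1,3,299,299
#   python export_chainercv2.py resnet50 --image sample.jpg --gpu 0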
Example #22
#!/usr/bin/env python

import argparse
import chainer
import numpy as np

import chainercv.links as C
import onnx_chainer

parser = argparse.ArgumentParser()
parser.add_argument('--gpu', type=int, default=-1)
parser.add_argument('--size', type=int, default=224)
parser.add_argument('--model', default='ResNet50')
parser.add_argument('--pretrained-model', type=str)
parser.add_argument('--kwargs', default='{}')
args = parser.parse_args()

kwargs = eval(args.kwargs)
if args.pretrained_model is not None:
    kwargs['pretrained_model'] = args.pretrained_model

model = getattr(C, args.model)(**kwargs)
if args.gpu >= 0:
    chainer.cuda.get_device_from_id(args.gpu).use()
    model.to_gpu()

# Pseudo input
x = model.xp.zeros((1, 3, args.size, args.size), dtype=np.float32)

onnx_chainer.export_testcase(model, x, '%s_%d' % (args.model, args.size))
Example #23
def fn(test_name):
    np.random.seed(42)
    x = np.random.rand(1, 3, 224, 224).astype(np.float32)
    onnx_chainer.export_testcase(model, [x],
                                 os.path.join('out', test_name),
                                 opset_version=9)
Example #24
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import argparse
import chainer
import numpy as np

import chainercv.links as C
import onnx_chainer

parser = argparse.ArgumentParser()
parser.add_argument('--gpu', type=int, default=-1)
parser.add_argument('--size', type=int, default=224)
args = parser.parse_args()

model = C.ResNet50()
if args.gpu >= 0:
    chainer.cuda.get_device_from_id(args.gpu).use()
    model.to_gpu()

# Pseudo input
x = model.xp.zeros((1, 3, args.size, args.size), dtype=np.float32)

onnx_chainer.export_testcase(model, x, 'resnet50_%d' % args.size)
Example #25
import chainer
import chainer.functions as F
import numpy as np
import onnx
import onnx_chainer
from onnx_chainer.replace_func import as_funcnode


class Sign(chainer.Chain):
    def forward(self, x):
        y = F.relu(x)
        y = F.sign(y)
        y = F.relu(y)
        return y


F.sign = as_funcnode('Sign')(F.sign)


def convert_sign(param):
    return onnx.helper.make_node('Sign', param.input_names,
                                 param.output_names),


external_converters = {'Sign': convert_sign}

model = Sign()
onnx_chainer.export_testcase(model, [np.array(3.14)],
                             'sign',
                             external_converters=external_converters)
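
One way to confirm that the custom converter was actually applied is to load the exported graph and look for the Sign node, mirroring the graph checks used in the tests above. This is only a sketch; it assumes the 'sign' directory written by the export_testcase call above and reuses the onnx import from the top of this example.

# Sketch: verify the exported graph contains the converted Sign node.
import os

exported = onnx.load(os.path.join('sign', 'model.onnx'))
op_types = [n.op_type for n in exported.graph.node]
assert op_types.count('Sign') == 1
assert 'Relu' in op_types  # the surrounding F.relu calls are exported as usual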