    def _test_network(self, fun):
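        # Round-trip check: build the TF graph, save a random checkpoint,
        # convert the network to compressed NNEF, then export its activations.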
        with utils.EnvVars(TF_CPP_MIN_LOG_LEVEL=3):
            fun_name = fun.__name__

            network_outputs = fun()
            checkpoint_path = os.path.join("out", "checkpoint", fun_name,
                                           fun_name + ".ckpt")
            feed_dict = get_feed_dict()
            checkpoint_path = save_random_checkpoint(network_outputs,
                                                     checkpoint_path,
                                                     feed_dict)

            tf.reset_default_graph()
            tf.set_random_seed(0)

            command = """
            ./nnef_tools/convert.py \\
                --input-format tensorflow-py \\
                --input-model nnef_tests.activation_export.tf_py_network_test_cases.{fun_name} {cp_path} \\
                --output-format nnef \\
                --output-model out/nnef/{fun_name}.nnef.tgz \\
                --compress \\
                --conversion-info""".format(fun_name=fun_name,
                                            cp_path=checkpoint_path)
            print(command)
            convert_using_command(command)

            command = """
            ./nnef_tools/export_activation.py \\
                --input-format tensorflow-py \\
                --input-model nnef_tests.activation_export.tf_py_network_test_cases.{fun_name} {cp_path} \\
                --conversion-info out/nnef/{fun_name}.nnef.tgz.conversion.json
            """.format(fun_name=fun_name, cp_path=checkpoint_path)
            print(command)
            export_activation_using_command(command)
Example #2
    def _test_network(self, prototxt_path, model_path):
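        # Convert a Caffe prototxt/caffemodel pair to compressed NNEF, then
        # export activations using the generated conversion-info file.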
        network = os.path.basename(model_path.rsplit('.', 1)[0])

        command = """
        ./nnef_tools/convert.py --input-format caffe \\
                                --input-model {prototxt_path} {caffemodel_path} \\
                                --output-format nnef \\
                                --output-model out/nnef/{network}.nnef.tgz \\
                                --compress \\
                                --conversion-info""".format(
            prototxt_path=prototxt_path,
            caffemodel_path=model_path,
            network=network)
        print(command)
        convert_using_command(command)

        command = """
        ./nnef_tools/export_activation.py --input-format caffe \\
                                          --input-model {prototxt_path} {caffemodel_path} \\
                                          --conversion-info out/nnef/{network}.nnef.tgz.conversion.json
        """.format(prototxt_path=prototxt_path,
                   caffemodel_path=model_path,
                   network=network)
        print(command)
        export_activation_using_command(command)
    def _test_model(self, filename, run=True, compare=True, max_val=255.0):
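        # Convert TFLite -> NNEF -> TFLite, then optionally run the original
        # and re-converted models on the same random input and compare outputs.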

        network_name = filename.rsplit('/', 1)[1].rsplit('.', 1)[0].replace(
            '.', '_').replace('-', '_')
        print(filename)
        command = """
        ./nnef_tools/convert.py --input-format=tensorflow-lite \\
                                --output-format=nnef \\
                                --input-model={input} \\
                                --output-model=out/nnef/{network}.nnef \\
                                --permissive \\
                                --io-transform SMART_NHWC_TO_NCHW \\
                                --conversion-info
        """.format(input=filename, network=network_name)
        print(command)
        convert.convert_using_command(command)

        command = """
        ./nnef_tools/convert.py --input-format=nnef \\
                                --output-format=tensorflow-lite \\
                                --input-model=out/nnef/{network}.nnef \\
                                --output-model=out/tflite/{network}.tflite \\
                                --permissive \\
                                --io-transform SMART_NCHW_TO_NHWC \\
                                --conversion-info
        """.format(network=network_name)
        print(command)
        convert.convert_using_command(command)

        activation_testing = int(os.environ.get('NNEF_ACTIVATION_TESTING',
                                                '1'))
        print("Activation testing is", "ON" if activation_testing else "OFF")
        if activation_testing:
            import numpy as np

            output, output2, input = None, None, None
            if run:
                output, input = self.run_model(model_path=filename,
                                               max_val=max_val)
                output2, _ = self.run_model(
                    model_path="out/tflite/{}.tflite".format(network_name),
                    input_data=input,
                    max_val=max_val)
            if compare:
                print('Compare:')
                print(output.shape, np.min(output), np.mean(output),
                      np.max(output))
                print(output2.shape, np.min(output2), np.mean(output2),
                      np.max(output2))
                self.assertTrue(np.all(np.isfinite(output)))
                self.assertTrue(np.all(np.isfinite(output2)))
                self.assertTrue(np.allclose(output, output2, atol=1e-5))
Example #4
    def _test_model(self, filename, run=True, compare=True, max_val=255.0):
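        # TFLite round trip using the older --input-framework/--output-framework
        # command-line interface; outputs of both models are compared at the end.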
        output, output2, input = None, None, None
        if run:
            output, input = self.run_model(model_path=filename,
                                           max_val=max_val)

        network_name = filename.rsplit('/', 1)[1].rsplit('.', 1)[0].replace(
            '.', '_').replace('-', '_')
        print(filename)
        command = """
        ./nnef_tools/convert.py --input-framework=tensorflow-lite \\
                                --output-framework=nnef \\
                                --input-model={input} \\
                                --output-directory=out/nnef/{network} \\
                                --permissive
        """.format(input=filename, network=network_name)
        print(command)
        convert.convert_using_command(command)

        command = """
        ./nnef_tools/convert.py --input-framework=nnef \\
                                --output-framework=tensorflow-lite \\
                                --input-model=out/nnef/{network}/model \\
                                --output-directory=out/tflite/{network} \\
                                --permissive
        """.format(network=network_name)
        print(command)
        convert.convert_using_command(command)

        if run:
            output2, _ = self.run_model(
                model_path="out/tflite/{}/model.tflite".format(network_name),
                input_data=input,
                max_val=max_val)

        if compare:
            print('Compare:')
            print(output.shape, np.min(output), np.mean(output),
                  np.max(output))
            print(output2.shape, np.min(output2), np.mean(output2),
                  np.max(output2))
            self.assertTrue(np.all(np.isfinite(output)))
            self.assertTrue(np.all(np.isfinite(output2)))
            self.assertTrue(np.allclose(output, output2, atol=1e-5))
    def _test_network(self, path, size):
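        # Convert a frozen TensorFlow protobuf to NNEF and back, then run both
        # graphs on the same random input and check that the activations match.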
        network = os.path.basename(path.rsplit('.', 1)[0])
        input_shape = "(float32, [2, {size}, {size}, 3])".format(size=size)
        command = """
        ./nnef_tools/convert.py --input-framework=tensorflow-pb \\
                                --input-model={} \\
                                --input-shape="{}" \\
                                --output-framework=nnef \\
                                --output-directory=out/nnef/{} \\
                                --compress""".format(path, input_shape,
                                                     network)
        print(command)
        convert.convert_using_command(command)

        command = """
        ./nnef_tools/convert.py --input-framework=nnef \\
                                --input-model=out/nnef/{}/model.nnef.tgz \\
                                --output-framework=tensorflow-pb \\
                                --output-directory=out/tensorflow-pb/{}""".format(
            network, network)
        print(command)
        convert.convert_using_command(command)

        tf.reset_default_graph()
        load_graph_from_pb(path)

        [input] = get_placeholders()
        outputs = get_tensors_with_no_consumers()
        feed = np.random.random([2, size, size, 3])

        with tf.Session() as sess:
            activations = sess.run(outputs, feed_dict={input: feed})

        tf.reset_default_graph()
        load_graph_from_pb('out/tensorflow-pb/{}/model.pb'.format(network))

        [input] = get_placeholders()
        outputs = get_tensors_with_no_consumers()

        with tf.Session() as sess:
            activations2 = sess.run(outputs, feed_dict={input: feed})

        for a1, a2 in zip(activations, activations2):
            self.assertTrue(np.allclose(a1, a2))
    def _test_network(self, path):
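        # Convert a TensorFlow protobuf to compressed NNEF and export
        # activations with the produced conversion-info file.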
        network = os.path.basename(path.rsplit('.', 1)[0])

        command = """
        ./nnef_tools/convert.py --input-format=tensorflow-pb \\
                                --input-model={path} \\
                                --output-format=nnef \\
                                --output-model=out/nnef/{network}.nnef.tgz \\
                                --compress \\
                                --conversion-info""".format(path=path, network=network)
        print(command)
        convert_using_command(command)

        command = """
        ./nnef_tools/export_activation.py --input-format tensorflow-pb \\
                                          --input-model {path} \\
                                          --conversion-info out/nnef/{network}.nnef.tgz.conversion.json
        """.format(path=path, network=network)
        print(command)
        export_activation_using_command(command)
Example #7
    def _test_model(self,
                    filename,
                    run=True,
                    compare=True,
                    source_shape="None",
                    custom_converters=None):
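        # Convert ONNX -> NNEF -> ONNX, optionally with custom converters,
        # then run both ONNX models on random inputs and compare their outputs.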
        if custom_converters is None:
            custom_converters = []

        convs = ["onnx_to_nnef_" + conv for conv in custom_converters]
        network_name = filename.rsplit('/', 1)[1].rsplit('.', 1)[0].replace(
            '.', '_').replace('-', '_')
        print(filename)
        command = """
        ./nnef_tools/convert.py --input-framework=onnx \\
                                --output-framework=nnef \\
                                --input-model={} \\
                                --output-directory=out/nnef/{} \\
                                --input-shape="{}" \\
                                --custom-converters="{}" \\
                                --permissive
        """.format(filename, network_name, source_shape, ','.join(convs))
        print(command)
        convert.convert_using_command(command)

        convs = ["nnef_to_onnx_" + conv for conv in custom_converters]
        command = """
        ./nnef_tools/convert.py --input-framework=nnef \\
                                --output-framework=onnx \\
                                --input-model=out/nnef/{}/model \\
                                --output-directory=out/onnx/{} \\
                                --custom-converters="{}" \\
                                --permissive
        """.format(network_name, network_name, ','.join(convs))
        print(command)
        convert.convert_using_command(command)

        filename2 = os.path.join('out', 'onnx', network_name, 'model.onnx')
        check_onnx_model(filename2)

        g = onnx_io.read_onnx_from_protobuf(filename2)
        input_shapes = [input.shape for input in g.inputs]
        input_dtypes = [input.dtype for input in g.inputs]

        del g

        if run:
            inputs = []
            for input_shape, input_dtype in zip(input_shapes, input_dtypes):
                if input_dtype == 'FLOAT':
                    inputs.append(
                        np.random.random(input_shape).astype(np.float32) *
                        0.8 + 0.1)
                elif input_dtype == 'BOOL':
                    inputs.append(np.random.random(input_shape) > 0.5)
                elif input_dtype == 'INT64':
                    inputs.append((np.random.random(input_shape) *
                                   1000).astype(np.int64))
                else:
                    assert False

            outputs = None
            if compare:
                print('Running original ONNX:')
                outputs = run_onnx_model(filename, inputs)

            print('Running our ONNX:')
            outputs2 = run_onnx_model(filename2, inputs)

            if compare:
                print('Comparing:')
                for output, output2 in zip(outputs, outputs2):
                    # print('Max dist:', np.max(np.abs(output-output2)))
                    self.assertTrue(np.all(np.isfinite(output)))
                    self.assertTrue(np.all(np.isfinite(output2)))
                    self.assertTrue(np.allclose(output, output2, atol=1e-5))
    def _test_model(self,
                    predict_net_path,
                    init_net_path,
                    value_info_path,
                    feed_dict_override=None,
                    test_shapes=True,
                    test_outputs=True,
                    can_run=True,
                    can_compare=True,
                    can_convert=True):
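        # Convert Caffe2 -> NNEF -> Caffe2 (or just rewrite the model when
        # conversion is not supported), run the original and resulting nets on
        # the same feed, and compare shapes and outputs.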
        network_name = utils.split_path(predict_net_path)[-2]
        print('Testing {}: {}, {}, {}'.format(network_name, predict_net_path,
                                              init_net_path, value_info_path))
        reader = Reader()
        g = reader(predict_net_path, init_net_path, value_info_path)
        input_name_shape_dtypes = [(tensor.name, tensor.shape, tensor.dtype)
                                   for tensor in g.inputs]
        output_shapes = [t.shape for t in g.outputs]

        our_dir = os.path.join('out', 'caffe2', network_name)
        if can_convert:
            print("Converting...")
            nnef_path = os.path.join('out', 'nnef', network_name + '.nnef')
            command = """
            ./nnef_tools/convert.py --input-format caffe2 \\
                                    --output-format nnef \\
                                    --input-model {predict_net} {init_net} {value_info} \\
                                    --output-model {nnef}
            """.format(predict_net=predict_net_path,
                       init_net=init_net_path,
                       value_info=value_info_path,
                       nnef=nnef_path)
            print(command)
            convert_using_command(command)

            command = """
            ./nnef_tools/convert.py --input-format nnef \\
                                    --output-format caffe2 \\
                                    --input-model {nnef} \\
                                    --output-model {out_dir}
            """.format(nnef=nnef_path, out_dir=our_dir)
            print(command)
            convert_using_command(command)
        else:
            print("Writing out model without conversion...")
            writer = Writer()
            writer(g, our_dir)
            del g

        activation_testing = int(os.environ.get('NNEF_ACTIVATION_TESTING',
                                                '1'))
        if not activation_testing:
            print("Activation testing is OFF")
        if can_run and activation_testing:

            from caffe2.python import workspace

            feed_dict = self._get_random_feed_dict(input_name_shape_dtypes)

            if feed_dict_override:
                feed_dict.update(feed_dict_override)

            print('Running original Caffe2 model...')
            outputs = run_caffe2_model(predict_net_path, init_net_path,
                                       feed_dict)

            if can_convert:
                print('Running converted Caffe2 model...')
            else:
                print('Running generated Caffe2 model...')
            feed_dict2 = {
                k.replace('/', '_'): v
                for k, v in six.iteritems(feed_dict)
            }
            outputs2 = run_caffe2_model(
                os.path.join(our_dir, 'predict_net.pb'),
                os.path.join(our_dir, 'init_net.pb'), feed_dict2)

            if can_compare:
                print("Comparing...")
                self.assertEqual(
                    {
                        k.replace('/', '_'): v
                        for k, v in six.iteritems(
                            json_utils.load(value_info_path))
                    }, json_utils.load(os.path.join(our_dir,
                                                    'value_info.json')))

                for output, output2, output_shape in zip(
                        outputs, outputs2, output_shapes):
                    if test_shapes:
                        self.assertEqual(tuple(output_shape), output.shape)
                        self.assertEqual(tuple(output_shape), output2.shape)
                    if test_outputs:
                        self.assertTrue(np.all(np.isfinite(output)))
                        self.assertTrue(np.all(np.isfinite(output2)))
                        self.assertTrue(np.allclose(output, output2,
                                                    atol=1e-5))
        print('Passed.\n\n')
    def _test(self,
              fun,
              cmp=True,
              custom_tf_to_nnef_converters="",
              custom_nnef_to_tf_converters="",
              test_module="nnef_tests.conversion.tf_py_layer_test_cases",
              atol=1e-5):
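        # Full TF-py round trip: TF graph -> NNEF -> generated TF source; the
        # original and regenerated networks are run (NHWC, plus NCHW on GPU)
        # and their activations compared.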

        activation_testing = int(os.environ.get('NNEF_ACTIVATION_TESTING',
                                                '1'))
        print("Activation testing is", "ON" if activation_testing else "OFF")

        out_dir = os.path.join("out", fun.__name__)
        try:
            tf.reset_default_graph()
            tf.set_random_seed(0)

            network_outputs = fun()
            feed_dict = get_feed_dict()
            old_names = [
                placeholder.name for placeholder in get_placeholders()
            ]
            checkpoint_path = os.path.join("out", fun.__name__,
                                           "orig_checkpoint",
                                           fun.__name__ + ".ckpt")
            checkpoint_path = save_random_checkpoint(network_outputs,
                                                     checkpoint_path,
                                                     feed_dict)

            tf.reset_default_graph()
            tf.set_random_seed(0)

            compress_nnef = False
            command = """
                ./nnef_tools/convert.py --input-format tensorflow-py \\
                                        --output-format nnef \\
                                        --input-model {module}.{network} {checkpoint} \\
                                        --output-model out/{network}/{network}.nnef{tgz} \\
                                        --custom-converters {custom} \\
                                        --permissive \\
                                        --io-transformation SMART_TF_NHWC_TO_NCHW \\
                                        {compress}
            """.format(checkpoint=checkpoint_path if checkpoint_path else "",
                       network=fun.__name__,
                       custom=" ".join(custom_tf_to_nnef_converters),
                       compress="--compress" if compress_nnef else "",
                       module=test_module,
                       tgz=".tgz" if compress_nnef else "")

            convert.convert_using_command(command)

            if activation_testing:
                tf.reset_default_graph()
                tf.set_random_seed(0)
                network_outputs = fun()
                network_output_list = []
                utils.recursive_visit(network_outputs,
                                      lambda t: network_output_list.append(t))
                # Flatten is needed because of MaxPoolWithArgMax objects
                outputs = utils.flatten(
                    self._run_tfpy(network_output_list, feed_dict,
                                   checkpoint_path))
            else:
                outputs = None

            prefer_nhwc_options = [True]
            if tf_has_cuda_gpu():
                prefer_nhwc_options += [False]
            for prefer_nhwc in prefer_nhwc_options:
                print("Converting to TensorFlow {}".format(
                    "NHWC" if prefer_nhwc else "NCHW"))
                data_format_str = ("nhwc" if prefer_nhwc else "nchw")
                tf_output_path = os.path.join(
                    "out", fun.__name__,
                    fun.__name__ + '_' + data_format_str + '.py')
                command = """
                    ./nnef_tools/convert.py --input-format nnef \\
                                            --output-format tensorflow-py \\
                                            --input-model out/{network}/{network}.nnef{tgz} \\
                                            --output-model {output} \\
                                            --io-transformation SMART_NCHW_TO_TF_NHWC \\
                                            --custom-converters {custom} \\
                                            --permissive
                """.format(network=fun.__name__,
                           custom=" ".join(custom_nnef_to_tf_converters),
                           tgz=".nnef.tgz" if compress_nnef else "",
                           output=tf_output_path)
                convert.convert_using_command(command)

                with open(os.path.join(tf_output_path), 'r') as f:
                    tf_src = f.read()

                # noinspection PyProtectedMember
                new_net_fun = tf_py_io._tfsource_to_function(
                    tf_src, fun.__name__)

                tf.reset_default_graph()
                tf.set_random_seed(0)

                if activation_testing:
                    tf.reset_default_graph()
                    tf.set_random_seed(0)
                    network_outputs = new_net_fun()
                    network_output_list = []
                    utils.recursive_visit(
                        network_outputs,
                        lambda t: network_output_list.append(t))
                    feed_dict2 = {
                        placeholder.name: feed_dict[old_names[i]]
                        for i, placeholder in enumerate(get_placeholders())
                    }
                    outputs2 = utils.flatten(
                        self._run_tfpy(
                            network_output_list, feed_dict2,
                            (os.path.join(tf_output_path + ".checkpoint")
                             if checkpoint_path else None)))

                    if cmp:
                        self.assertTrue(len(outputs) == len(outputs2))
                        for a, b in zip(outputs, outputs2):
                            if a.dtype == np.bool:
                                self.assertTrue(np.all(a == b))
                            else:
                                print('Max diff:', np.max(np.abs(a - b)))
                                self.assertTrue(np.all(np.isfinite(a)))
                                self.assertTrue(np.all(np.isfinite(b)))
                                self.assertTrue(np.allclose(a, b, atol=atol))

        finally:
            if self.delete_dats_and_checkpoints:
                dat_files = utils.recursive_glob(out_dir, "*.dat")
                checkpoints = utils.recursive_glob(out_dir, "*ckpt*")
                for file_name in set(dat_files + checkpoints):
                    os.remove(file_name)
Example #10
    def _test_model(self, prototxt_path, model_path, nonnegative_input=False):
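        # Exercise the Caffe model three ways: rewrite it with Reader/Writer,
        # convert it through NNEF and back, then run all three nets on the
        # same random inputs and compare their outputs.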
        network_name = utils.split_path(prototxt_path)[-2]
        print('Testing {}: {}, {}'.format(network_name, prototxt_path,
                                          model_path))
        reader = Reader()
        g = reader(prototxt_path, model_path)

        writer = Writer()
        our_prototxt_path = os.path.join('out', 'caffe-ours', network_name,
                                         network_name + '.prototxt')
        our_model_path = os.path.join('out', 'caffe-ours', network_name,
                                      network_name + '.caffemodel')
        writer(g, our_prototxt_path)
        del g

        nnef_path = os.path.join('out', 'nnef', network_name + '.nnef')
        command = """
        ./nnef_tools/convert.py --input-format caffe \\
                                --output-format nnef \\
                                --input-model {prototxt} {caffemodel} \\
                                --output-model {nnef}
        """.format(prototxt=prototxt_path,
                   caffemodel=model_path,
                   nnef=nnef_path)
        print(command)
        convert_using_command(command)

        converted_prototxt_path = os.path.join('out', 'caffe-converted',
                                               network_name,
                                               network_name + '.prototxt')
        converted_model_path = os.path.join('out', 'caffe-converted',
                                            network_name,
                                            network_name + '.caffemodel')
        command = """
        ./nnef_tools/convert.py --input-format nnef \\
                                --output-format caffe \\
                                --input-model {nnef} \\
                                --output-model {prototxt}
        """.format(nnef=nnef_path, prototxt=converted_prototxt_path)
        print(command)
        convert_using_command(command)

        if int(os.environ.get('NNEF_ACTIVATION_TESTING', '1')):
            with utils.EnvVars(GLOG_minloglevel=3):
                import caffe
                import numpy as np

                def random_inputs(net):
                    # type: (caffe.Net) -> typing.Dict[str, np.ndarray]
                    np.random.seed(0)
                    sub = 0.0 if nonnegative_input else 0.5
                    return {
                        input:
                        np.random.random(list(net.blobs[input].shape)) - sub
                        for input in net.inputs
                    }

                print("Running original net...")
                net = caffe.Net(prototxt_path, model_path, caffe.TEST)
                out_orig = net.forward(**random_inputs(net))

                print("Running generated net...")
                net = caffe.Net(our_prototxt_path, our_model_path, caffe.TEST)
                out_ours = net.forward(**random_inputs(net))
                self.assertEqual(len(out_orig), len(out_ours))
                for orig_name, other_name, orig_arr, other_arr in zip(
                        out_orig.keys(), out_ours.keys(), out_orig.values(),
                        out_ours.values()):
                    self.assertTrue(np.allclose(orig_arr, other_arr))

                print("Running converted net...")
                net = caffe.Net(converted_prototxt_path, converted_model_path,
                                caffe.TEST)
                out_converted = net.forward(**random_inputs(net))

                self.assertEqual(len(out_orig), len(out_converted))
                for orig_name, other_name, orig_arr, other_arr in zip(
                        out_orig.keys(), out_converted.keys(),
                        out_orig.values(), out_converted.values()):
                    self.assertTrue(
                        np.allclose(orig_arr, other_arr, rtol=1e-5, atol=1e-5))
        print('Done')
Example #11
    def _test_model(self,
                    filename,
                    run=True,
                    compare=True,
                    source_shape="None",
                    custom_converters=None):
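        # ONNX round trip executed through the Caffe2 ONNX backend, falling
        # back from CUDA to CPU when no GPU is available.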
        if custom_converters is None:
            custom_converters = []

        convs = ["custom.onnx_to_nnef_" + conv for conv in custom_converters]
        network_name = filename.rsplit('/', 1)[1].rsplit('.', 1)[0].replace(
            '.', '_').replace('-', '_')
        print(filename)
        command = """
        ./nnef_tools/convert.py --input-format onnx \\
                                --output-format nnef \\
                                --input-model {} \\
                                --output-model out/nnef/{}.nnef \\
                                --input-shape "{}" \\
                                --custom-converters {} \\
                                --permissive \\
                                --conversion-info
        """.format(filename, network_name, source_shape, ' '.join(convs))
        print(command)
        convert.convert_using_command(command)

        convs = ["custom.nnef_to_onnx_" + conv for conv in custom_converters]
        command = """
        ./nnef_tools/convert.py --input-format nnef \\
                                --output-format onnx \\
                                --input-model out/nnef/{}.nnef \\
                                --output-model out/onnx/{}.onnx \\
                                --custom-converters {} \\
                                --permissive \\
                                --conversion-info
        """.format(network_name, network_name, ' '.join(convs))
        print(command)
        convert.convert_using_command(command)

        filename2 = os.path.join('out', 'onnx', network_name + '.onnx')

        g = onnx_io.read_onnx_from_protobuf(filename2)
        input_shapes = [input.shape for input in g.inputs]
        input_dtypes = [input.dtype for input in g.inputs]

        del g

        activation_testing = int(os.environ.get('NNEF_ACTIVATION_TESTING',
                                                '1'))
        print("Activation testing is", "ON" if activation_testing else "OFF")
        if activation_testing:
            import numpy as np
            import caffe2.python.onnx.backend as backend
            import onnx

            def check_onnx_model(filename):
                model = onnx.load(filename)
                onnx.checker.check_model(model)

            def run_onnx_model(filename, input):
                model = onnx.load(filename)
                try:
                    rep = backend.prepare(model, device="CUDA:0")
                    outputs = rep.run(input)
                except Exception:
                    print("Couldn't run in CUDA, running on CPU:")
                    rep = backend.prepare(model, device="CPU")
                    outputs = rep.run(input)

                return outputs

            check_onnx_model(filename2)

            if run:
                inputs = []
                for input_shape, input_dtype in zip(input_shapes,
                                                    input_dtypes):
                    if input_dtype == 'FLOAT':
                        inputs.append(
                            np.random.random(input_shape).astype(np.float32) *
                            0.8 + 0.1)
                    elif input_dtype == 'BOOL':
                        inputs.append(np.random.random(input_shape) > 0.5)
                    elif input_dtype == 'INT64':
                        inputs.append((np.random.random(input_shape) *
                                       1000).astype(np.int64))
                    else:
                        assert False

                outputs = None
                if compare:
                    print('Running original ONNX:')
                    outputs = run_onnx_model(filename, inputs)

                print('Running converted ONNX model...')
                outputs2 = run_onnx_model(filename2, inputs)

                if compare:
                    print('Comparing:')
                    for output, output2 in zip(outputs, outputs2):
                        # print('Max dist:', np.max(np.abs(output-output2)))
                        self.assertTrue(np.all(np.isfinite(output)))
                        self.assertTrue(np.all(np.isfinite(output2)))
                        self.assertTrue(np.allclose(output, output2,
                                                    atol=1e-5))
Example #12
    def _test_network(self, path, source_shape=1, ignore_extra_outputs=False):
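        # TF-pb -> NNEF -> TF-pb round trip, converting back once preferring
        # NHWC and once NCHW; the converted graph's outputs must match the
        # original's.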
        assert utils.is_anyint(source_shape) or isinstance(
            source_shape, (list, tuple))

        network = os.path.basename(path.rsplit('/', 2)[1])
        command = """
        ./nnef_tools/convert.py --input-format=tensorflow-pb \\
                                --input-model={path} \\
                                --output-format=nnef \\
                                --output-model=out/nnef/{network}.nnef.tgz \\
                                --compress \\
                                --input-shape="{shape}" """.format(
            path=path, network=network, shape=source_shape)
        print(command)
        convert.convert_using_command(command)

        for prefer_nchw in [False, True]:
            print("Prefer NCHW" if prefer_nchw else "Prefer NHWC")

            prefer_nchw_opt = "--prefer-nchw" if prefer_nchw else ""
            prefer_nchw_str = "_prefer_nchw" if prefer_nchw else ""

            command = """
            ./nnef_tools/convert.py --input-format=nnef \\
                                    --input-model=out/nnef/{network}.nnef.tgz \\
                                    --output-format=tensorflow-pb \\
                                    --output-model=out/tensorflow-pb{nchw_str}/{network}.pb \\
                                    {nchw_opt}""".format(
                network=network,
                nchw_str=prefer_nchw_str,
                nchw_opt=prefer_nchw_opt)
            print(command)
            convert.convert_using_command(command)

            activation_testing = int(
                os.environ.get('NNEF_ACTIVATION_TESTING', '1'))
            print("Activation testing is",
                  "ON" if activation_testing else "OFF")
            if activation_testing:
                import numpy as np
                import tensorflow as tf

                def normalize_shape(shape, default=1):
                    return [
                        int(dim.value) if dim.value is not None else default
                        for dim in shape.dims
                    ]

                tf.reset_default_graph()
                self._set_default_graph_from_pb(path)

                if isinstance(source_shape, (list, tuple)):
                    feed_dict = {
                        placeholder.name: np.random.random(source_shape)
                        for placeholder in get_placeholders()
                    }
                else:
                    feed_dict = {
                        placeholder.name: np.random.random(
                            normalize_shape(placeholder.shape,
                                            default=source_shape))
                        for placeholder in get_placeholders()
                    }
                old_names = [
                    placeholder.name for placeholder in get_placeholders()
                ]

                outputs = self._run_tfpb(path, feed_dict)

                tf.reset_default_graph()
                path2 = os.path.join('out', 'tensorflow-pb' + prefer_nchw_str,
                                     network + ".pb")
                self._set_default_graph_from_pb(path2)

                feed_dict2 = {
                    placeholder.name: feed_dict[old_names[i]]
                    for i, placeholder in enumerate(get_placeholders())
                }

                outputs2 = self._run_tfpb(path2, feed_dict2)

                if ignore_extra_outputs:
                    outputs2 = outputs2[-len(outputs):]

                self.assertTrue(len(outputs) == len(outputs2))
                for a, b in zip(outputs, outputs2):
                    if a.dtype == np.bool:
                        self.assertTrue(np.all(a == b))
                    else:
                        self.assertTrue(np.all(np.isfinite(a)))
                        self.assertTrue(np.all(np.isfinite(b)))
                        self.assertTrue(np.allclose(a, b, atol=1e-5))
Example #13
    def _test(self,
              fun,
              cmp=True,
              custom_tf_to_nnef_converters="",
              custom_nnef_to_tf_converters="",
              test_module="tests.activation.tf_py_layer_test_cases"):
        out_dir = os.path.join("out", fun.__name__)
        try:
            tf.reset_default_graph()
            tf.set_random_seed(0)

            network_outputs = fun()
            feed_dict = get_feed_dict()
            checkpoint_path = os.path.join("out", fun.__name__,
                                           "orig_checkpoint",
                                           fun.__name__ + ".ckpt")
            checkpoint_path = save_random_checkpoint(network_outputs,
                                                     checkpoint_path,
                                                     feed_dict)

            tf.reset_default_graph()
            tf.set_random_seed(0)

            compress_nnef = False
            command = """
                ./nnef_tools/convert.py --input-framework=tensorflow-py \\
                                        --output-framework=nnef \\
                                        --input-model={module}.{network}{checkpoint} \\
                                        --output-directory=out/{network}/nnef \\
                                        --custom-converters="{custom}" \\
                                        --permissive \\
                                        --io-transformation=SMART_TF_NHWC_TO_NCHW \\
                                        {compress}
                """.format(checkpoint=':' +
                           checkpoint_path if checkpoint_path else "",
                           network=fun.__name__,
                           custom=custom_tf_to_nnef_converters,
                           compress="--compress" if compress_nnef else "",
                           module=test_module)

            convert.convert_using_command(command)

            open(os.path.join("out", fun.__name__, "__init__.py"), "w").close()

            tf.reset_default_graph()
            tf.set_random_seed(0)
            fun()
            conv_info = conversion_info.load(
                os.path.join("out", fun.__name__, "nnef", "conversion.json"))
            tf_activation_exporter.export(output_path=os.path.join(
                "out", fun.__name__, "nnef", "activations"),
                                          feed_dict=feed_dict,
                                          conversion_info=conv_info,
                                          checkpoint_path=checkpoint_path,
                                          verbose=False)

            prefer_nhwc_options = [True]
            if tf_has_cuda_gpu():
                prefer_nhwc_options += [False]
            for prefer_nhwc in prefer_nhwc_options:
                print("Converting to TensorFlow {}".format(
                    "NHWC" if prefer_nhwc else "NCHW"))
                data_format_str = ("nhwc" if prefer_nhwc else "nchw")
                tf_output_dir = os.path.join("out", fun.__name__,
                                             "tf_" + data_format_str)
                command = """
                    ./nnef_tools/convert.py --input-framework=nnef \\
                                            --output-framework=tensorflow-py \\
                                            --input-model=out/{network}/nnef/model{tgz} \\
                                            --output-directory={output} \\
                                            --io-transformation=SMART_NCHW_TO_TF_NHWC \\
                                            --custom-converters="{custom}" \\
                                            --permissive
                    """.format(network=fun.__name__,
                               custom=custom_nnef_to_tf_converters,
                               tgz=".nnef.tgz" if compress_nnef else "",
                               output=tf_output_dir)
                convert.convert_using_command(command)

                open(os.path.join(tf_output_dir, "__init__.py"), "w").close()
                open(os.path.join(tf_output_dir, "model", "__init__.py"),
                     "w").close()

                with open(os.path.join(tf_output_dir, "model",
                                       "model.py")) as f:
                    tf_src = f.read()

                # noinspection PyProtectedMember
                new_net_fun = tf_py_io._tfsource_to_function(
                    tf_src, fun.__name__)

                conv_info_tf_to_nnef = conversion_info.load(
                    os.path.join(out_dir, "nnef", "conversion.json"))
                conv_info_nnef_to_tf = conversion_info.load(
                    os.path.join(tf_output_dir, "conversion.json"))
                conv_info_tf_to_tf = conversion_info.compose(
                    conv_info_tf_to_nnef, conv_info_nnef_to_tf)

                conversion_info.dump(
                    conv_info_tf_to_tf,
                    os.path.join(tf_output_dir, "conv_info_tf_to_tf.json"))

                feed_dict2 = activation_test.transform_feed_dict(
                    feed_dict, conv_info_tf_to_tf)
                nnef2_out_dir = os.path.join(out_dir,
                                             "nnef_from_tf_" + data_format_str)

                tf.reset_default_graph()
                tf.set_random_seed(0)

                command = """
                    ./nnef_tools/convert.py --input-framework=tensorflow-py \\
                                            --output-framework=nnef \\
                                            --input-model={input}{checkpoint} \\
                                            --output-directory={output} \\
                                            --custom-converters="{custom}" \\
                                            --permissive \\
                                            --io-transformation=SMART_TF_NHWC_TO_NCHW \\
                                            {compress}
                    """.format(checkpoint=(
                    ':' +
                    (os.path.join(tf_output_dir, "model", "checkpoint",
                                  "model.ckpt") if checkpoint_path else "")),
                               input=tf_output_dir.replace('/', '.') +
                               ".model.model." + fun.__name__,
                               custom=custom_tf_to_nnef_converters,
                               compress="--compress" if compress_nnef else "",
                               output=nnef2_out_dir)

                convert.convert_using_command(command)

                conv_info_tf_to_nnef2 = conversion_info.load(
                    os.path.join(out_dir, "nnef_from_tf_" + data_format_str,
                                 "conversion.json"))
                conv_info_nnef_to_nnef = conversion_info.compose(
                    conv_info_nnef_to_tf, conv_info_tf_to_nnef2)
                conversion_info.dump(
                    conv_info_nnef_to_nnef,
                    os.path.join(nnef2_out_dir, "conv_info_nnef_to_nnef.json"))

                tf.reset_default_graph()
                tf.set_random_seed(0)
                new_net_fun()
                tf_activation_exporter.export(
                    output_path=os.path.join(nnef2_out_dir, "activations"),
                    feed_dict=feed_dict2,
                    conversion_info=conv_info_tf_to_nnef2,
                    checkpoint_path=(os.path.join(tf_output_dir, "model",
                                                  "checkpoint", "model.ckpt")
                                     if checkpoint_path else None),
                    verbose=False)

                if cmp:
                    activation_test.compare_activation_dirs(
                        os.path.join(out_dir, "nnef", "activations"),
                        os.path.join(out_dir,
                                     "nnef_from_tf_" + data_format_str,
                                     "activations"),
                        conv_info_nnef_to_nnef,
                        verbose=False)
        finally:
            if DELETE_DATS_AND_CHECKPOINTS:
                dat_files = recursive_glob(out_dir, "*.dat")
                checkpoints = recursive_glob(out_dir, "*ckpt*")
                for file_name in set(dat_files + checkpoints):
                    os.remove(file_name)
    def _test_network(path, source_shape=1):
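        # TF-pb round trip with conversion info: activations are exported from
        # the original and re-converted graphs and the two NNEF dumps compared.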
        assert utils.is_anyint(source_shape) or isinstance(
            source_shape, (list, tuple))

        network = os.path.basename(path.rsplit('.', 1)[0])
        command = """
        ./nnef_tools/convert.py --input-format=tensorflow-pb \\
                                --input-model={path} \\
                                --output-format=nnef \\
                                --output-model=out/nnef/{network}.nnef.tgz \\
                                --compress \\
                                --conversion-info \\
                                --input-shape="{shape}" """.format(
            path=path, network=network, shape=source_shape)
        print(command)
        convert.convert_using_command(command)

        for prefer_nchw in [False, True]:
            print("Prefer NCHW" if prefer_nchw else "Prefer NHWC")

            prefer_nchw_opt = "--prefer-nchw" if prefer_nchw else ""
            prefer_nchw_str = "_prefer_nchw" if prefer_nchw else ""

            command = """
            ./nnef_tools/convert.py --input-format=nnef \\
                                    --input-model=out/nnef/{network}.nnef.tgz \\
                                    --output-format=tensorflow-pb \\
                                    --output-model=out/tensorflow-pb{nchw_str}/{network}.pb \\
                                    --conversion-info \\
                                    {nchw_opt}""".format(
                network=network,
                nchw_str=prefer_nchw_str,
                nchw_opt=prefer_nchw_opt)
            print(command)
            convert.convert_using_command(command)

            command = """
            ./nnef_tools/convert.py --input-format=tensorflow-pb \\
                                    --input-model=out/tensorflow-pb{nchw_str}/{network}.pb \\
                                    --output-format=nnef \\
                                    --output-model=out/nnef2{nchw_str}/{network}.nnef.tgz \\
                                    --compress \\
                                    --conversion-info""".format(
                network=network, nchw_str=prefer_nchw_str)
            print(command)
            convert.convert_using_command(command)

            activation_testing = int(
                os.environ.get('NNEF_ACTIVATION_TESTING', '1'))
            print("Activation testing is",
                  "ON" if activation_testing else "OFF")
            if activation_testing:
                import numpy as np
                import tensorflow as tf
                from nnef_tools import export_activation
                from nnef_tools.activation_export.tensorflow import tf_activation_exporter

                def normalize_shape(shape, default=1):
                    return [
                        int(dim.value) if dim.value is not None else default
                        for dim in shape.dims
                    ]

                tf.reset_default_graph()
                export_activation.tf_set_default_graph_from_pb(path)

                if isinstance(source_shape, (list, tuple)):
                    feed_dict = {
                        placeholder.name: np.random.random(source_shape)
                        for placeholder in get_placeholders()
                    }
                else:
                    feed_dict = {
                        placeholder.name: np.random.random(
                            normalize_shape(placeholder.shape,
                                            default=source_shape))
                        for placeholder in get_placeholders()
                    }

                conv_info_tf_to_nnef = conversion_info.load(
                    os.path.join("out", 'nnef',
                                 network + ".nnef.tgz.conversion.json"))

                tf_activation_exporter.export(
                    output_path=os.path.join("out", 'nnef', network +
                                             ".nnef.tgz.activations"),
                    feed_dict=feed_dict,
                    conversion_info=conv_info_tf_to_nnef,
                    input_output_only=True,
                    verbose=False)

                conv_info_nnef_to_tf = conversion_info.load(
                    os.path.join('out', 'tensorflow-pb' + prefer_nchw_str,
                                 network + ".pb.conversion.json"))

                conv_info_tf_to_tf = conversion_info.compose(
                    conv_info_tf_to_nnef, conv_info_nnef_to_tf)

                feed_dict2 = activation_test.transform_feed_dict(
                    feed_dict, conv_info_tf_to_tf)

                conv_info_tf_to_nnef2 = conversion_info.load(
                    os.path.join('out', 'nnef2' + prefer_nchw_str,
                                 network + ".nnef.tgz.conversion.json"))
                conv_info_nnef_to_nnef = conversion_info.compose(
                    conv_info_nnef_to_tf, conv_info_tf_to_nnef2)

                tf.reset_default_graph()
                export_activation.tf_set_default_graph_from_pb(
                    os.path.join('out', 'tensorflow-pb' + prefer_nchw_str,
                                 network + ".pb"))

                tf_activation_exporter.export(
                    output_path=os.path.join("out", 'nnef2' + prefer_nchw_str,
                                             network +
                                             ".nnef.tgz.activations"),
                    feed_dict=feed_dict2,
                    conversion_info=conv_info_tf_to_nnef2,
                    input_output_only=True,
                    verbose=False)

                activation_test.compare_activation_dirs(
                    os.path.join("out", 'nnef',
                                 network + ".nnef.tgz.activations"),
                    os.path.join("out", 'nnef2' + prefer_nchw_str,
                                 network + ".nnef.tgz.activations"),
                    conv_info_nnef_to_nnef,
                    verbose=False)
    def _test(self,
              fun,
              cmp=True,
              custom_tf_to_nnef_converters="",
              custom_nnef_to_tf_converters="",
              test_module="nnef_tests.conversion.tf_py_layer_test_cases"):

        activation_testing = int(os.environ.get('NNEF_ACTIVATION_TESTING',
                                                '1'))
        print("Activation testing is", "ON" if activation_testing else "OFF")

        out_dir = os.path.join("out", fun.__name__)
        try:
            tf.reset_default_graph()
            tf.set_random_seed(0)

            network_outputs = fun()
            feed_dict = get_feed_dict()
            checkpoint_path = os.path.join("out", fun.__name__,
                                           "orig_checkpoint",
                                           fun.__name__ + ".ckpt")
            checkpoint_path = save_random_checkpoint(network_outputs,
                                                     checkpoint_path,
                                                     feed_dict)

            tf.reset_default_graph()
            tf.set_random_seed(0)

            compress_nnef = False
            command = """
                ./nnef_tools/convert.py --input-format tensorflow-py \\
                                        --output-format nnef \\
                                        --input-model {module}.{network} {checkpoint} \\
                                        --output-model out/{network}/{network}.nnef{tgz} \\
                                        --custom-converters {custom} \\
                                        --permissive \\
                                        --io-transformation SMART_TF_NHWC_TO_NCHW \\
                                        --conversion-info \\
                                        {compress}
                """.format(
                checkpoint=checkpoint_path if checkpoint_path else "",
                network=fun.__name__,
                custom=" ".join(custom_tf_to_nnef_converters),
                compress="--compress" if compress_nnef else "",
                module=test_module,
                tgz=".tgz" if compress_nnef else "")

            convert.convert_using_command(command)

            if activation_testing:
                tf.reset_default_graph()
                tf.set_random_seed(0)
                fun()
                conv_info = conversion_info.load(
                    os.path.join("out", fun.__name__,
                                 fun.__name__ + ".nnef.conversion.json"))

                tf_activation_exporter.export(output_path=os.path.join(
                    "out", fun.__name__, fun.__name__ + ".nnef",
                    "activations"),
                                              feed_dict=feed_dict,
                                              conversion_info=conv_info,
                                              checkpoint_path=checkpoint_path,
                                              verbose=False)

            prefer_nhwc_options = [True]
            if tf_has_cuda_gpu():
                prefer_nhwc_options += [False]
            for prefer_nhwc in prefer_nhwc_options:
                print("Converting to TensorFlow {}".format(
                    "NHWC" if prefer_nhwc else "NCHW"))
                data_format_str = ("nhwc" if prefer_nhwc else "nchw")
                tf_output_path = os.path.join(
                    "out", fun.__name__,
                    fun.__name__ + '_' + data_format_str + '.py')
                command = """
                    ./nnef_tools/convert.py --input-format nnef \\
                                            --output-format tensorflow-py \\
                                            --input-model out/{network}/{network}.nnef{tgz} \\
                                            --output-model {output} \\
                                            --io-transformation SMART_NCHW_TO_TF_NHWC \\
                                            --custom-converters {custom} \\
                                            --permissive \\
                                            --conversion-info
                    """.format(network=fun.__name__,
                               custom=" ".join(custom_nnef_to_tf_converters),
                               tgz=".nnef.tgz" if compress_nnef else "",
                               output=tf_output_path)
                convert.convert_using_command(command)

                with open(os.path.join(tf_output_path), 'r') as f:
                    tf_src = f.read()

                # noinspection PyProtectedMember
                new_net_fun = tf_py_io._tfsource_to_function(
                    tf_src, fun.__name__)

                conv_info_tf_to_nnef = conversion_info.load(
                    os.path.join("out", fun.__name__,
                                 fun.__name__ + ".nnef.conversion.json"))
                conv_info_nnef_to_tf = conversion_info.load(
                    os.path.join(tf_output_path + ".conversion.json"))
                conv_info_tf_to_tf = conversion_info.compose(
                    conv_info_tf_to_nnef, conv_info_nnef_to_tf)

                conversion_info.dump(
                    conv_info_tf_to_tf,
                    os.path.join(tf_output_path + ".conv_info_tf_to_tf.json"))

                feed_dict2 = activation_test.transform_feed_dict(
                    feed_dict, conv_info_tf_to_tf)
                nnef2_out_dir = os.path.join(tf_output_path + ".nnef")

                tf.reset_default_graph()
                tf.set_random_seed(0)

                command = """
                    ./nnef_tools/convert.py --input-format tensorflow-py \\
                                            --output-format nnef \\
                                            --input-model {input} {checkpoint} \\
                                            --output-model {output} \\
                                            --custom-converters {custom} \\
                                            --permissive \\
                                            --io-transformation SMART_TF_NHWC_TO_NCHW \\
                                            --conversion-info \\
                                            {compress}
                    """.format(
                    checkpoint=(os.path.join(tf_output_path + ".checkpoint")
                                if checkpoint_path else ""),
                    input=tf_output_path.replace('/', '.')[:-len('.py')] +
                    "." + fun.__name__,
                    custom=" ".join(custom_tf_to_nnef_converters),
                    compress="--compress" if compress_nnef else "",
                    output=nnef2_out_dir)

                convert.convert_using_command(command)

                conv_info_tf_to_nnef2 = conversion_info.load(
                    nnef2_out_dir + ".conversion.json")
                conv_info_nnef_to_nnef = conversion_info.compose(
                    conv_info_nnef_to_tf, conv_info_tf_to_nnef2)
                conversion_info.dump(
                    conv_info_nnef_to_nnef,
                    os.path.join(nnef2_out_dir +
                                 ".conv_info_nnef_to_nnef.json"))

                if activation_testing:
                    tf.reset_default_graph()
                    tf.set_random_seed(0)
                    new_net_fun()
                    tf_activation_exporter.export(
                        output_path=os.path.join(nnef2_out_dir, "activations"),
                        feed_dict=feed_dict2,
                        conversion_info=conv_info_tf_to_nnef2,
                        checkpoint_path=(os.path.join(tf_output_path +
                                                      ".checkpoint")
                                         if checkpoint_path else None),
                        verbose=False)

                    if cmp:
                        activation_test.compare_activation_dirs(
                            os.path.join("out", fun.__name__,
                                         fun.__name__ + ".nnef",
                                         "activations"),
                            os.path.join(nnef2_out_dir, "activations"),
                            conv_info_nnef_to_nnef,
                            verbose=False)
        finally:
            if self.delete_dats_and_checkpoints:
                dat_files = utils.recursive_glob(out_dir, "*.dat")
                checkpoints = utils.recursive_glob(out_dir, "*ckpt*")
                for file_name in set(dat_files + checkpoints):
                    os.remove(file_name)