Example #1
0
def get_reader(input_format,
               output_format,
               input_shape=None,
               permissive=False,
               with_weights=True,
               custom_converters=None):
    """Build a Reader for the given input format.

    Imports are done lazily inside each branch so that only the
    frameworks actually involved in the conversion get loaded.
    For NNEF input, the parser configs matching the target format
    (plus any custom converter configs) are collected and handed
    to the reader.
    """
    if input_format == 'nnef':
        from nnef_tools.io.nnef.nnef_io import Reader

        parser_configs = [NNEFParserConfig.STANDARD_CONFIG]

        if output_format in ('tensorflow-pb', 'tensorflow-py',
                             'tensorflow-lite'):
            from nnef_tools.conversion.tensorflow import nnef_to_tf
            parser_configs.append(nnef_to_tf.ParserConfig)
        elif output_format == 'onnx':
            from nnef_tools.conversion.onnx import nnef_to_onnx
            parser_configs.append(nnef_to_onnx.ParserConfig)
        elif output_format == 'caffe':
            from nnef_tools.conversion.caffe import nnef_to_caffe
            parser_configs.append(nnef_to_caffe.ParserConfig)
        else:
            assert False

        parser_configs.extend(
            NNEFParserConfig.load_configs(custom_converters,
                                          load_standard=False))

        return Reader(parser_configs=parser_configs,
                      unify=(output_format == 'caffe'))

    if input_format == 'tensorflow-pb':
        # TODO custom converter
        from nnef_tools.io.tensorflow.tf_pb_io import Reader
        return Reader(convert_to_tf_py=True, input_shape=input_shape)

    if input_format == 'tensorflow-py':
        from nnef_tools.io.tensorflow.tf_py_io import Reader
        custom_traceable_functions = (
            get_tf_py_custom_traceable_functions(custom_converters)
            if custom_converters else None)
        return Reader(expand_gradients=True,
                      custom_traceable_functions=custom_traceable_functions)

    if input_format == 'tensorflow-lite':
        # TODO custom converter
        from nnef_tools.io.tensorflow.tflite_io import Reader
        return Reader(convert_to_tf_py=True)

    if input_format == 'onnx':
        # TODO custom converter
        from nnef_tools.io.onnx.onnx_io import Reader
        return Reader(propagate_shapes=True, input_shape=input_shape)

    if input_format == 'caffe':
        # TODO custom converter
        from nnef_tools.io.caffe.caffe_io import Reader
        return Reader()

    assert False
Example #2
0
def read(path, parser_configs=None):
    # type: (str, typing.Optional[typing.List[NNEFParserConfig]])->NNEFGraph
    """Read an NNEF model into an NNEFGraph.

    path: a .tgz archive, a .nnef/.txt graph file, or a model directory.
    parser_configs: optional NNEFParserConfig list; combined into a single
        config used for parsing and shape inference.
    Raises utils.NNEFToolsException for unsupported path kinds.
    """

    if not (path.endswith('.tgz') or path.endswith('.nnef')
            or path.endswith('.txt') or os.path.isdir(path)):
        raise utils.NNEFToolsException(
            "Only .tgz or .nnef or .txt files or directories are supported")

    parser_config = NNEFParserConfig.combine_configs(
        parser_configs if parser_configs else [])

    path_to_load = None
    compressed = False

    try:
        if os.path.isdir(path):
            compressed = False
            with_weights = True
            path_to_load = path
        elif path.endswith('.tgz'):
            compressed = True
            with_weights = True
            # Archives are unpacked into a temp dir, removed in finally.
            path_to_load = tempfile.mkdtemp(prefix="nnef_")
            utils.tgz_extract(path, path_to_load)
        elif path.endswith('.nnef') or path.endswith('.txt'):
            # Plain graph file: read without weights.
            compressed = False
            with_weights = False
            path_to_load = path
        else:
            assert False

        # If there are fragments in the graph and also in parser_config
        # we remove the non-standard fragments from parser_config to avoid duplicate fragment definition
        if parser_config.fragments:
            re_graph = re.compile(r"^graph\s|\sgraph\s")
            re_fragment = re.compile(r"^fragment\s|\sfragment\s")
            graph_nnef_path = os.path.join(
                path_to_load,
                'graph.nnef') if os.path.isdir(path_to_load) else path_to_load
            # Scan line by line: a 'fragment' hit replaces the config's
            # fragments with only the standard ones; a 'graph' hit (or EOF)
            # ends the scan.
            with open(graph_nnef_path, 'r') as f:
                while True:
                    line = f.readline()
                    if not line:
                        break
                    if re_fragment.search(line):
                        parser_config.fragments = NNEFParserConfig.STANDARD_CONFIG.fragments
                        break
                    if re_graph.search(line):
                        break

        return _read(parser_graph=parser_config.infer_shapes(
            parser_config.load_graph(path_to_load)),
                     with_weights=with_weights)

    finally:
        # Only remove path_to_load when we created it (extracted archive).
        if compressed and path_to_load:
            shutil.rmtree(path_to_load)
Example #3
0
def get_writer(input_format,
               output_format,
               compress,
               with_weights=True,
               custom_converters=None):
    """Build a Writer for the given output format.

    Imports are deferred to the selected branch so unused frameworks
    are never loaded. For NNEF output, fragments from any custom
    converter configs are passed along to the writer.
    """
    if output_format == 'nnef':
        from nnef_tools.io.nnef.nnef_io import Writer
        custom_configs = NNEFParserConfig.load_configs(custom_converters,
                                                       load_standard=False)
        custom_fragments = NNEFParserConfig.combine_configs(
            custom_configs).fragments
        return Writer(write_weights=with_weights,
                      fragments=custom_fragments,
                      only_print_used_fragments=True,
                      # clamp negative compression levels to 0
                      compression_level=max(compress, 0))

    if output_format == 'tensorflow-py':
        from nnef_tools.io.tensorflow.tf_py_io import Writer
        custom_imports = custom_op_protos = None
        if custom_converters:
            custom_imports, custom_op_protos = get_tf_py_imports_and_op_protos(
                custom_converters)
        return Writer(write_weights=with_weights,
                      custom_op_protos=custom_op_protos,
                      custom_imports=custom_imports)

    if output_format == 'tensorflow-pb':
        from nnef_tools.io.tensorflow.tf_pb_io import Writer
        return Writer(convert_from_tf_py=True)

    if output_format == 'tensorflow-lite':
        from nnef_tools.io.tensorflow.tflite_io import Writer
        return Writer(convert_from_tf_py=True)

    if output_format == 'onnx':
        from nnef_tools.io.onnx.onnx_io import Writer
        return Writer()

    if output_format == 'caffe':
        from nnef_tools.io.caffe.caffe_io import Writer
        return Writer()

    if output_format == 'caffe2':
        from nnef_tools.io.caffe2.caffe2_io import Writer
        return Writer()

    assert False
Example #4
0
def main():
    """CLI entry point: generate input tensors for an NNEF network.

    Reads the network (without weights), creates one array per graph input
    from the configured input sources, and writes them either as .dat files
    into the output directory or as binary tensors to stdout.
    Exits with status 1 on any error.
    """
    try:
        args = get_args(sys.argv)

        if not args.output:
            # Refuse to dump binary tensor data onto an interactive terminal.
            if sys.stdout.isatty():
                raise utils.NNEFToolsException("No output provided.")
            utils.set_stdout_to_binary()

        args.params = InputSources(args.params)

        if args.seed != -1:
            np.random.seed(args.seed)

        parser_configs = NNEFParserConfig.load_configs(args.custom_operations,
                                                       load_standard=True)
        reader = nnef_io.Reader(parser_configs=parser_configs,
                                input_shape=args.shape)

        # read without weights
        graph = reader(
            os.path.join(args.network, 'graph.nnef') if os.path.
            isdir(args.network) else args.network)

        # Note: loop variable renamed from `input` to avoid shadowing the
        # builtin of the same name.
        arrays = tuple(
            args.params.create_input(name=tensor.name,
                                     np_dtype=tensor.get_numpy_dtype(),
                                     shape=tensor.shape,
                                     allow_bigger_batch=True)
            for tensor in graph.inputs)

        if args.output:
            for tensor, array in zip(graph.inputs, arrays):
                nnef_io.write_nnef_tensor(
                    os.path.join(args.output, tensor.name + '.dat'), array)
        else:
            for array in arrays:
                nnef.write_tensor(sys.stdout, array)
    except Exception as e:
        # CLI boundary: report and exit. Use sys.exit rather than the
        # `exit` builtin, which is provided by the site module and is not
        # guaranteed to exist (e.g. under `python -S`).
        print('Error: {}'.format(e), file=sys.stderr)
        sys.exit(1)
Example #5
0
def read(path, parser_configs=None):
    # type: (str, typing.Optional[typing.List[NNEFParserConfig]])->NNEFGraph
    """Read an NNEF model into an NNEFGraph.

    path: a .tgz archive, a .nnef graph file, or a model directory.
    parser_configs: optional NNEFParserConfig list; combined into a single
        config used for parsing and shape inference.
    Raises ValueError if the path is not one of the supported kinds.
    """
    # Validate with a real exception instead of `assert`: asserts are
    # stripped under `python -O`, which would let bad input fail obscurely
    # further down.
    if not (path.endswith('.tgz') or path.endswith('.nnef')
            or os.path.isdir(path)):
        raise ValueError(
            "Only .tgz or .nnef files or directories are supported")

    parser_config = NNEFParserConfig.combine_configs(
        parser_configs if parser_configs else [])

    path_to_load = None
    compressed = False

    try:
        if os.path.isdir(path):
            compressed = False
            with_weights = True
            path_to_load = path
        elif path.endswith('.tgz'):
            compressed = True
            with_weights = True
            # Archives are unpacked into a temp dir, removed in finally.
            path_to_load = tempfile.mkdtemp(prefix="nnef_")
            _tgz_extract(path, path_to_load)
        elif path.endswith('.nnef'):
            # Plain graph file: read without weights.
            compressed = False
            with_weights = False
            path_to_load = path
        else:
            assert False  # unreachable: path kinds validated above

        return _read(parser_graph=parser_config.infer_shapes(
            parser_config.load_graph(path_to_load)),
                     with_weights=with_weights)

    finally:
        # Only remove path_to_load when we created it (extracted archive).
        if compressed and path_to_load:
            shutil.rmtree(path_to_load)
Example #6
0
def read(
    path,  # type: str
    parser_configs=None,  # type: typing.Optional[typing.List[NNEFParserConfig]]
    input_shape=None,  # type: typing.Union[None, typing.List[int], typing.Dict[str, typing.List[int]]]
):
    # type: (...)->NNEFGraph
    """Read an NNEF model into an NNEFGraph, optionally overriding input shapes.

    path: a .tgz archive, a .nnef/.txt graph file, or a model directory.
    parser_configs: optional NNEFParserConfig list; combined into a single
        config used for parsing and shape inference.
    input_shape: a list applied to every 'external' op, or a dict mapping
        input tensor names to shapes; applied before shape inference.
    Raises utils.NNEFToolsException for unsupported paths or a bad
        input_shape type.
    """

    if not (path.endswith('.tgz') or path.endswith('.nnef')
            or path.endswith('.txt') or os.path.isdir(path)):
        raise utils.NNEFToolsException(
            "Only .tgz or .nnef or .txt files or directories are supported")

    parser_config = NNEFParserConfig.combine_configs(
        parser_configs if parser_configs else [])

    path_to_load = None
    compressed = False

    try:
        if os.path.isdir(path):
            compressed = False
            with_weights = True
            path_to_load = path
        elif path.endswith('.tgz'):
            compressed = True
            with_weights = True
            # Archives are unpacked into a temp dir, removed in finally.
            path_to_load = tempfile.mkdtemp(prefix="nnef_")
            utils.tgz_extract(path, path_to_load)
        elif path.endswith('.nnef') or path.endswith('.txt'):
            # Plain graph file: read without weights.
            compressed = False
            with_weights = False
            path_to_load = path
        else:
            assert False

        # If there are fragments in the graph and also in parser_config
        # we remove the non-standard fragments from parser_config to avoid duplicate fragment definition
        if parser_config.fragments:
            re_graph = re.compile(r"^graph\s|\sgraph\s")
            re_fragment = re.compile(r"^fragment\s|\sfragment\s")
            graph_nnef_path = os.path.join(
                path_to_load,
                'graph.nnef') if os.path.isdir(path_to_load) else path_to_load
            # Scan line by line: a 'fragment' hit replaces the config's
            # fragments with only the standard ones; a 'graph' hit (or EOF)
            # ends the scan.
            with open(graph_nnef_path, 'r') as f:
                while True:
                    line = f.readline()
                    if not line:
                        break
                    if re_fragment.search(line):
                        parser_config.fragments = NNEFParserConfig.STANDARD_CONFIG.fragments
                        break
                    if re_graph.search(line):
                        break

        parser_graph = parser_config.load_graph(path_to_load)

        if input_shape is not None:
            if not isinstance(input_shape, (list, dict)):
                raise utils.NNEFToolsException(
                    "input_shape must be list or dict")

            # Override the shapes of 'external' ops (the graph inputs)
            # before shape inference: a dict selects tensors by name,
            # a list applies the same shape to every input.
            for op in parser_graph.operations:
                if op.name == 'external':
                    if isinstance(input_shape, dict):
                        name = op.outputs['output']
                        if name in input_shape:
                            op.attribs['shape'] = input_shape[name]
                    else:
                        op.attribs['shape'] = input_shape

        parser_config.infer_shapes(parser_graph)
        return _read(parser_graph=parser_graph, with_weights=with_weights)

    finally:
        # Only remove path_to_load when we created it (extracted archive).
        if compressed and path_to_load:
            shutil.rmtree(path_to_load)
Example #7
0
    NONATOMIC,
    'l1_normalization':
    NONATOMIC,
    'l2_normalization':
    NONATOMIC,
    'batch_normalization':
    convert_batch_normalization,
    'avg_roi_pool':
    UNSUPPORTED,
    'max_roi_pool':
    UNSUPPORTED,  # maybe supported
    'roi_resample':
    UNSUPPORTED,
    'avg_roi_align':
    UNSUPPORTED,
    'max_roi_align':
    UNSUPPORTED,
    'linear_quantize':
    NONATOMIC,
    'logarithmic_quantize':
    NONATOMIC,
    'copy_n':
    convert_copy_n,
}

# TODO add to class as static(?) method
# NNEF must be parsed with this before calling nnef_to_tf.Converter on it
ParserConfig = NNEFParserConfig(lowered=[
    name for name, converter in six.iteritems(_DefaultConverters)
    if converter is NONATOMIC
])
Example #8
0
    'select': convert_select,
    'conv': convert_conv,
    'deconv': convert_deconv,
    'split': convert_split,
    'linear': convert_linear,
    'reshape': convert_reshape,
    'leaky_relu': convert_leaky_relu,
    'prelu': convert_prelu,
    'local_response_normalization': convert_local_response_normalization,
    'matmul': convert_matmul,
    'transpose': convert_transpose,
    'add_n': convert_add_n,
    'softmax': convert_softmax,
    'batch_normalization': convert_batch_normalization,
    'squeeze': convert_squeeze,
    'unsqueeze': convert_unsqueeze,
    'max_pool_with_index': convert_max_pool_with_index,
    'clamp': convert_clamp,
    'slice': convert_slice,
    'nearest_upsample': convert_nearest_upsample,
    'nearest_downsample': convert_nearest_downsample,

    "sin": partial(generic_convert_unary, target_name='Sin'),
    "cos": partial(generic_convert_unary, target_name='Cos'),
    "tile": convert_tile,
    "pad": convert_pad,
}

# NNEF must be parsed with this before calling nnef_to_caffe.Converter on it
ParserConfig = NNEFParserConfig(lowered=[
    name for name, converter in six.iteritems(_StandardConverters)
    if converter is NONATOMIC
])
Example #9
0
def main():
    """CLI entry point: generate weight tensors for an NNEF model.

    Reads the network (without weights), generates weight data from the
    configured input sources, and — when the model came as an archive or a
    single graph file — repacks the result into a .tgz archive, keeping a
    backup of the previous archive until the new one is fully written.
    Exits with status 1 on any error.
    """
    try:
        args = get_args(sys.argv)

        args.params = InputSources(args.params)

        if args.seed != -1:
            np.random.seed(args.seed)

        parent_dir_of_input_model = os.path.dirname(
            utils.path_without_trailing_separator(args.network))

        tmp_dir = None
        if args.network.endswith('.tgz'):
            # Unpack next to the input model; removed in the inner finally.
            nnef_path = tmp_dir = tempfile.mkdtemp(
                prefix="nnef_", dir=parent_dir_of_input_model)
            utils.tgz_extract(args.network, nnef_path)
        else:
            nnef_path = args.network

        try:
            parser_configs = NNEFParserConfig.load_configs(
                args.custom_operations, load_standard=True)
            reader = nnef_io.Reader(parser_configs=parser_configs)

            # read without weights
            graph = reader(
                os.path.join(nnef_path, 'graph.nnef') if os.path.
                isdir(nnef_path) else nnef_path)
            if os.path.isdir(nnef_path):
                output_path = nnef_path
            elif nnef_path.endswith('.nnef') or nnef_path.endswith('.txt'):
                # Single graph file: weights go into a fresh temp dir.
                output_path = tmp_dir = tempfile.mkdtemp(
                    prefix="nnef_", dir=parent_dir_of_input_model)
            else:
                assert False

            did_generate_weights = generate_weights(graph,
                                                    nnef_path,
                                                    output_path,
                                                    input_sources=args.params)
            nnef_path = output_path

            if tmp_dir and did_generate_weights:
                if args.network.endswith('.tgz'):
                    # Replace the original archive; keep a backup until the
                    # new archive has been written successfully.
                    print("Info: Changing input archive", file=sys.stderr)
                    shutil.move(args.network,
                                args.network + '.nnef-tools-backup')
                    utils.tgz_compress(dir_path=nnef_path,
                                       file_path=args.network)
                    os.remove(args.network + '.nnef-tools-backup')
                else:
                    output_path = args.network.rsplit('.', 1)[0] + '.nnef.tgz'
                    backup_path = output_path + '.nnef-tools-backup'
                    if os.path.exists(output_path):
                        shutil.move(output_path, backup_path)
                    utils.tgz_compress(dir_path=nnef_path,
                                       file_path=output_path)
                    if os.path.exists(backup_path):
                        os.remove(backup_path)
        finally:
            if tmp_dir:
                shutil.rmtree(tmp_dir)
    except Exception as e:
        # CLI boundary: report and exit. Use sys.exit rather than the
        # `exit` builtin, which is provided by the site module and is not
        # guaranteed to exist (e.g. under `python -S`).
        print('Error: {}'.format(e), file=sys.stderr)
        sys.exit(1)
Example #10
0
def run_using_argv(argv):
    """CLI entry point: execute an NNEF model with the backend.

    Reads the model, collects input tensors (from files, a directory, or
    stdin), runs the graph, and writes outputs, exported activations, and
    statistics as requested. When statistics land inside a .tgz input
    model, the archive is re-packed with a backup kept until the new
    archive is written. Exits with status 1 on tool or parser errors.
    """
    try:
        args = get_args(argv)
        # output_names is None -> write the graph outputs;
        # a non-empty list -> export the named activations;
        # an empty list -> write nothing.
        write_outputs = args.output_names is None or args.output_names

        if args.input is None:
            # Refuse to read binary tensor data from an interactive terminal.
            if sys.stdin.isatty():
                raise utils.NNEFToolsException("No input provided!")
            utils.set_stdin_to_binary()

        if write_outputs:
            if args.output is None:
                if sys.stdout.isatty():
                    raise utils.NNEFToolsException("No output provided!")
                utils.set_stdout_to_binary()

        parent_dir_of_input_model = os.path.dirname(
            utils.path_without_trailing_separator(args.network))
        tmp_dir = None

        if args.network.endswith('.tgz'):
            # Unpack next to the input model; removed in the inner finally.
            nnef_path = tmp_dir = tempfile.mkdtemp(
                prefix="nnef_", dir=parent_dir_of_input_model)
            utils.tgz_extract(args.network, nnef_path)
        else:
            nnef_path = args.network

        try:
            parser_configs = NNEFParserConfig.load_configs(
                args.custom_operations, load_standard=True)

            # read without weights
            reader = nnef_io.Reader(parser_configs=parser_configs,
                                    infer_shapes=False)
            graph = reader(
                os.path.join(nnef_path, 'graph.nnef') if os.path.
                isdir(nnef_path) else nnef_path)

            if args.input is None:
                inputs = tuple(
                    nnef.read_tensor(sys.stdin)
                    for _ in range(len(graph.inputs)))
            elif len(args.input) == 1 and os.path.isdir(args.input[0]):
                inputs = tuple(
                    nnef_io.read_nnef_tensor(
                        os.path.join(args.input[0], tensor.name + '.dat'))
                    for tensor in graph.inputs)
            else:
                inputs = tuple(
                    nnef_io.read_nnef_tensor(path) for path in args.input)

            # Re-read the graph with shapes taken from the actual input
            # arrays so shape inference matches the supplied data.
            # (Loop variable renamed from `input` to avoid shadowing the
            # builtin of the same name.)
            reader = nnef_io.Reader(parser_configs=parser_configs,
                                    input_shape=tuple(
                                        list(arr.shape) for arr in inputs))

            graph = reader(nnef_path)

            tensor_hooks = []

            stats_hook = None
            if args.stats:
                stats_hook = backend.StatisticsHook()
                tensor_hooks.append(stats_hook)

            if write_outputs and args.output_names is not None:
                if '*' in args.output_names:
                    # '*' exports every non-constant, non-variable tensor.
                    tensor_hooks.append(
                        backend.ActivationExportHook(
                            tensor_names=[
                                t.name for t in graph.tensors
                                if not t.is_constant and not t.is_variable
                            ],
                            output_directory=args.output))
                else:
                    tensor_hooks.append(
                        backend.ActivationExportHook(
                            tensor_names=args.output_names,
                            output_directory=args.output))

            if args.permissive:
                backend.try_to_fix_unsupported_attributes(graph)

            outputs = backend.run(nnef_graph=graph,
                                  inputs=inputs,
                                  device=args.device,
                                  custom_operations=get_custom_runners(
                                      args.custom_operations),
                                  tensor_hooks=tensor_hooks)

            if write_outputs and args.output_names is None:
                if args.output is None:
                    for array in outputs:
                        nnef.write_tensor(sys.stdout, array)
                else:
                    for tensor, array in zip(graph.outputs, outputs):
                        nnef_io.write_nnef_tensor(
                            os.path.join(args.output, tensor.name + '.dat'),
                            array)

            if stats_hook:
                if args.stats.endswith('/') or args.stats.endswith('\\'):
                    stats_path = os.path.join(nnef_path, args.stats,
                                              'graph.stats')
                else:
                    stats_path = os.path.join(nnef_path, args.stats)
                stats_hook.save_statistics(stats_path)

            # If statistics were written inside an unpacked archive,
            # re-pack so they end up inside the model file.
            if tmp_dir and (args.stats and _is_inside(nnef_path, args.stats)):
                if args.network.endswith('.tgz'):
                    # Replace the original archive; keep a backup until the
                    # new archive has been written successfully.
                    print("Info: Changing input archive", file=sys.stderr)
                    shutil.move(args.network,
                                args.network + '.nnef-tools-backup')
                    utils.tgz_compress(dir_path=nnef_path,
                                       file_path=args.network)
                    os.remove(args.network + '.nnef-tools-backup')
                else:
                    output_path = args.network.rsplit('.', 1)[0] + '.nnef.tgz'
                    backup_path = output_path + '.nnef-tools-backup'
                    if os.path.exists(output_path):
                        shutil.move(output_path, backup_path)
                    utils.tgz_compress(dir_path=nnef_path,
                                       file_path=output_path)
                    if os.path.exists(backup_path):
                        os.remove(backup_path)
        finally:
            if tmp_dir:
                shutil.rmtree(tmp_dir)
    except utils.NNEFToolsException as e:
        # Use sys.exit rather than the site-provided `exit` builtin,
        # which is not guaranteed to exist (e.g. under `python -S`).
        print("Error: " + str(e), file=sys.stderr)
        sys.exit(1)
    except nnef.Error as e:
        print("Error: " + str(e), file=sys.stderr)
        sys.exit(1)