def parse_arg_proto(s):
    """Parse one argument prototype string into an ArgProto.

    Recognized suffixes: a trailing '?' marks the argument optional and a
    trailing '[]' marks it an array. A ':T' annotation marks it a tensor.
    Several alias names may be separated by '/'.
    """
    optional = s.endswith('?')
    if optional:
        s = s[:-1]
    array = s.endswith('[]')
    if array:
        s = s[:-2]
    tensor = ':' in s
    if tensor:
        s, annotation = s.split(':', 1)
        # "T" is the only supported type annotation.
        assert annotation.strip() == "T"
    names = [alias.strip() for alias in s.split('/')]
    assert all(utils.is_identifier(n) for n in names), "{}".format(names)
    return ArgProto(arg_names=names,
                    is_tensor=tensor,
                    is_array=array,
                    is_optional=optional)
def parse_op_proto(s):
    """Parse an op prototype of the form "name(arg1, arg2, ...)" into an OpProto."""
    assert s and s.endswith(')')
    name, arg_list = s[:-1].split('(', 1)
    protos = []
    for raw_arg in arg_list.split(','):
        stripped = raw_arg.strip()
        assert stripped
        protos.append(parse_arg_proto(stripped))
    # Dotted op names are accepted; dots are treated like identifier parts.
    assert utils.is_identifier(name.replace('.', '_'))
    return OpProto(op_name=name, arg_protos=protos)
def visit(output_, path):
    # type: (typing.Any, str)->None
    """Store output_ in outputs_dict under an identifier derived from path.

    tf.Variable values are unwrapped to their value tensor; anything that is
    not a tf.Tensor afterwards is ignored. The first character of path is
    dropped (presumably a path separator — confirm against the caller).
    """
    value = output_.value() if isinstance(output_, tf.Variable) else output_
    if not isinstance(value, tf.Tensor):
        return
    name = path[1:]
    if not name:
        name = "output"
    elif name[0].isdigit():
        # Prefix so the generated name is a valid identifier.
        name = "output" + name
    assert utils.is_identifier(name), \
        "Bad name_override '{}' for tensor {}. " \
        "Please use valid identifiers as keys in the dict(s) " \
        "returned by your network function.".format(name, value.name)
    outputs_dict[name] = value
def convert_tf_pb_to_nnef(
        # Main parameters
        file_name,  # type: str  # path to the TF protobuf graph file
        output_directory,  # type: str
        network_name,  # type: str  # must be a valid python identifier
        source_shapes=None,
        source_dtypes=None,
        compress=False,  # write .nnef.tgz archives instead of directories
        # Extra parameters
        verbose=False,
        allow_extensions=False,
        allow_gradients=False,
        optimization_level=None,
        io_transform=None,
        activation_export_feed_dict=None,
        activation_export_io_only=False,
        overwrite=False,  # delete output_directory if it already exists
        # Module level parameters
        converter_allow_imprecise_image_resize=False,
        optimizer_remove_unneeded_copies=None,
        optimizer_remove_inverse_transposes=None,
        optimizer_merge_transforms_into_variables=None,
        optimizer_merge_transforms_into_constants=None):
    # type: (...)->None
    """Convert a TensorFlow protobuf graph to NNEF and write it to disk.

    Pipeline as implemented below: read the protobuf graph, evaluate/convert
    it to the tf_py representation, convert to NNEF, write an unoptimized
    copy, run the data-format optimizer, write the final NNEF output plus
    per-step conversion-info JSON files ("step1.json", "step2.json",
    "conversion.json"), and optionally export activations.

    NOTE(review): allow_extensions and allow_gradients are accepted but not
    referenced in this body — confirm whether they are consumed elsewhere.
    """
    assert utils.is_identifier(network_name), \
        "Network name must be None or a valid python identifier"
    # Refuse to clobber an existing output directory unless overwrite=True.
    if os.path.exists(output_directory):
        if overwrite:
            shutil.rmtree(output_directory)
        else:
            assert False, "{} exists, delete it or use overwrite=True".format(
                output_directory)
    g = tf_pb_io.read_tf_graph_from_protobuf(file_name)
    # Evaluate/convert in place; shapes/dtypes may be overridden by the caller.
    tf_pb_to_tf_py.evaluate_and_convert(tf_graph=g,
                                        source_shapes=source_shapes,
                                        source_dtypes=source_dtypes)
    converter = tf_to_nnef.Converter(
        enable_imprecise_image_resize=converter_allow_imprecise_image_resize)
    h, conv_info = converter(g)
    h.name = network_name
    # Step 1: raw TF->NNEF conversion info, plus an unoptimized snapshot.
    conversion_info.dump(conv_info, os.path.join(output_directory,
                                                 "step1.json"))
    nnef_io.write(
        h,
        os.path.join(
            output_directory,
            network_name + ("_not_optimized.nnef.tgz"
                            if compress else "_not_optimized_nnef")))
    # Step 2: data-format optimization (transpose/copy elimination etc.).
    opt_info = _call_optimizer(
        optimizer=nnef_data_format_optimizer,
        graph=h,
        custom_transposable_ops=None,
        io_transform=io_transform,
        verbose=verbose,
        rename_tensors=True,
        optimization_level=optimization_level,
        remove_unneeded_copies=optimizer_remove_unneeded_copies,
        remove_inverse_transposes=optimizer_remove_inverse_transposes,
        merge_transforms_into_variables=
        optimizer_merge_transforms_into_variables,
        merge_transforms_into_constants=
        optimizer_merge_transforms_into_constants)
    conversion_info.dump(opt_info, os.path.join(output_directory,
                                                "step2.json"))
    # Combined mapping from original TF names to final NNEF names.
    conv_info = conversion_info.compose(conv_info, opt_info)
    conversion_info.dump(conv_info, os.path.join(output_directory,
                                                 "conversion.json"))
    nnef_io.write(
        h,
        os.path.join(output_directory,
                     network_name + (".nnef.tgz" if compress else "_nnef")))
    # Optionally run the network and dump activations for verification.
    if activation_export_feed_dict:
        _load_graph(file_name)
        tf_activation_exporter.export(
            output_path=os.path.join(output_directory, "activations"),
            feed_dict=activation_export_feed_dict,
            conversion_info=conv_info,
            verbose=verbose,
            input_output_only=activation_export_io_only)
    if verbose:
        print("Done.")
def _print(tf_graph, file_handle, custom_op_protos=None, custom_imports=None,
           with_name_dict=False):
    # type: (TFGraph, typing.TextIO, typing.Optional[typing.List[OpProto]], str, bool)->None
    """Emit tf_graph as executable Python source code into file_handle.

    The generated module defines one function named after the graph that
    rebuilds the network op by op. When with_name_dict is True the function
    returns (__inputs, __outputs, __tensors) OrderedDicts; otherwise it
    returns a single OrderedDict of the outputs.
    """
    # Known op prototypes, keyed by op name; custom protos override defaults.
    op_proto_by_name = {
        trf.op_proto.op_name: trf.op_proto
        for trf in DefaultTraceableFunctions
    }
    if custom_op_protos:
        op_proto_by_name.update(
            {op_proto.op_name: op_proto
             for op_proto in custom_op_protos})
    printed_tensors = set(
    )  # we need this because 0d constants are not printed as tensors
    f = file_handle
    # --- module header ---
    print("from __future__ import division, print_function, absolute_import",
          file=f)
    print("from collections import OrderedDict", file=f)
    print("import tensorflow as tf", file=f)
    # Ops spelled "_tf.*" need the internal-compat alias import.
    if any(op.name.startswith("_tf.") for op in tf_graph.operations):
        print(
            "from nnef_tools.io.tensorflow.tf_py.tf_py_compat import tf_internal as _tf",
            file=f)
    if custom_imports:
        print(custom_imports, file=f)
    print(file=f)
    print(file=f)
    assert utils.is_identifier(
        tf_graph.name), "Graph name '{}' is not an identifier".format(
            tf_graph.name)
    # --- generated function body, one line per operation ---
    print("def {}():".format(tf_graph.name), file=f)
    indent = " " * 4
    for op in list(tf_graph.operations):
        assert op.name in op_proto_by_name, 'We have no op_proto for op: {}'.format(
            op.name)
        args = args_from_tfop(op, op_proto_by_name[op.name],
                              allow_missing=True)
        for t in op.outputs:
            printed_tensors.add(t)
        print("{}{} = {}({})".format(indent, _format_result_names(op.outputs),
                                     op.name, _format_args(args)),
              file=f)
    print(file=f)
    if with_name_dict:
        # Emit __inputs / __outputs / __tensors OrderedDict literals.
        inputs = ",\n{}{}".format(indent, indent).join(
            '("{}", {})'.format(name, _format_tensor_name(t.name))
            for name, t in zip(tf_graph.input_ids, tf_graph.inputs))
        print("{}__inputs = OrderedDict([\n{}{}{}\n{}])".format(
            indent, indent, indent, inputs, indent),
              file=f)
        outputs = ",\n{}{}".format(indent, indent).join(
            '("{}", {})'.format(name, _format_tensor_name(t.name))
            for name, t in zip(tf_graph.output_ids, tf_graph.outputs))
        print("{}__outputs = OrderedDict([\n{}{}{}\n{}])".format(
            indent, indent, indent, outputs, indent),
              file=f)
        # Only tensors actually produced by a printed op are listed.
        tensors = ",\n{}{}".format(indent, indent).join(
            '("{}", {})'.format(t.name, _format_tensor_name(t.name))
            for t in sorted(tf_graph.tensors, key=lambda t: t.name)
            if t in printed_tensors)
        print("{}__tensors = OrderedDict([\n{}{}{}\n{}])".format(
            indent, indent, indent, tensors, indent),
              file=f)
        print(file=f)
        print("{}return __inputs, __outputs, __tensors".format(indent),
              file=f)
    else:
        # Only the outputs OrderedDict is returned.
        outputs = ",\n{}{}".format(indent, indent).join(
            '("{}", {})'.format(name, _format_tensor_name(t.name))
            for name, t in zip(tf_graph.output_ids, tf_graph.outputs))
        print("{}return OrderedDict([\n{}{}{}\n{}])".format(
            indent, indent, indent, outputs, indent),
              file=f)
    print(file=f)