Example #1
import tensorflow as tf
from tensorflow import GraphDef


def from_tensorflow_frozen_model(frozen_file,
                                 output_nodes=None,
                                 preprocessor=None,
                                 **kwargs):
    """
    Converts a TensorFlow frozen graph to a UFF model.

    Args:
        frozen_file (str): The path to the frozen TensorFlow graph to convert.
        output_nodes (list(str)): The names of the outputs of the graph. If not provided, graphsurgeon is used to automatically deduce output nodes.
        output_filename (str): The UFF file to write.
        preprocessor (str): The path to a preprocessing script that will be executed before the converter. This script should define a ``preprocess`` function which accepts a graphsurgeon DynamicGraph and modifies it in place.
        write_preprocessed (bool): If set to True, the converter will write out the preprocessed graph as well as a TensorBoard visualization. Must be used in conjunction with output_filename.
        text (bool): If set to True, the converter will also write out a human readable UFF file. Must be used in conjunction with output_filename.
        quiet (bool): If set to True, suppresses informational messages. Errors may still be printed.
        list_nodes (bool): If set to True, the converter displays a list of all nodes present in the graph.
        debug_mode (bool): If set to True, the converter prints verbose debug messages.
        return_graph_info (bool): If set to True, this function returns the graph input and output nodes in addition to the serialized UFF graph.

    Returns:
        serialized UFF MetaGraph (str)

        OR, if return_graph_info is set to True,

        serialized UFF MetaGraph (str), graph inputs (list(tensorflow.NodeDef)), graph outputs (list(tensorflow.NodeDef))
    """
    # Normalize None to an empty list here rather than using a mutable
    # default argument in the signature.
    if output_nodes is None:
        output_nodes = []
    graphdef = GraphDef()
    with tf.gfile.GFile(frozen_file, "rb") as frozen_pb:
        graphdef.ParseFromString(frozen_pb.read())
    # from_tensorflow is defined alongside this function in the uff converter.
    return from_tensorflow(graphdef, output_nodes, preprocessor, **kwargs)
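A minimal usage sketch, assuming the TensorRT uff package; the paths and output node name are hypothetical:

import uff

serialized_uff = uff.from_tensorflow_frozen_model(
    "model.pb",                  # hypothetical frozen graph
    output_nodes=["softmax"],    # depends on your model
    output_filename="model.uff",
)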
Example #2
    def __post_init__(self, model_path: Path):
        # Parse the frozen graph (TF 1.x GraphDef) and import it into a
        # dedicated tf.Graph owned by this instance.
        self.graph = tf.Graph()
        with self.graph.as_default():
            od_graph_def = GraphDef()
            with GFile(str(model_path / "frozen_inference_graph.pb"),
                       "rb") as fid:
                serialized_graph = fid.read()
                od_graph_def.ParseFromString(serialized_graph)
                tf.import_graph_def(od_graph_def, name="")
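For context, a hedged sketch of how this __post_init__ is typically wired up with a dataclasses.InitVar; the class name and fields are assumptions:

from dataclasses import dataclass, field, InitVar
from pathlib import Path

import tensorflow as tf  # TF 1.x APIs (GraphDef, gfile) assumed


@dataclass
class FrozenDetector:  # hypothetical wrapper class
    model_path: InitVar[Path]
    graph: tf.Graph = field(init=False)

    def __post_init__(self, model_path: Path):
        self.graph = tf.Graph()
        with self.graph.as_default():
            graph_def = tf.GraphDef()
            path = str(model_path / "frozen_inference_graph.pb")
            with tf.gfile.GFile(path, "rb") as fid:
                graph_def.ParseFromString(fid.read())
                tf.import_graph_def(graph_def, name="")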
Example #3
    def __init__(self, graph: tf.GraphDef = None):
        """
    Wrap a tf.GraphDef protocol buffer in a Graph object.

    Args:
      graph: a tf.Graph or tf.GraphDef protobuf that represents a
        TensorFlow operator graph. If set to None, generate an empty
        tf.GraphDef
    """
        if graph is None:
            graph_def = tf.GraphDef()
        elif isinstance(graph, tf.GraphDef):
            graph_def = graph
        elif isinstance(graph, tf.Graph):
            graph_def = graph.as_graph_def()
        else:
            raise TypeError(
                "Graph is of type {}. Expected a tf.Graph or GraphDef "
                "proto".format(type(graph)))
        self._graph_def = graph_def
        self._next_id = 1
        output_map = _decode_graph(graph_def)
        self._immutable_nodes = [
            node.ImmutableNode(self, self._get_next_id(), n,
                               output_map[n.name]) for n in graph_def.node
        ]
        self._deleted_nodes = set()
        self._added_nodes = {}
        self._version = 0
        self._collections = {}
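A hedged usage sketch, assuming this constructor belongs to a Graph wrapper class, named Graph here for illustration:

import tensorflow as tf  # TF 1.x APIs assumed

graph_def = tf.GraphDef()
with open("model.pb", "rb") as f:  # hypothetical path
    graph_def.ParseFromString(f.read())

g = Graph(graph_def)  # wrap an existing GraphDef
empty = Graph()       # graph=None yields an empty tf.GraphDef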
Example #4
    def load_frozen_graph(self, model_path):
        '''
        Load a frozen protobuf file from disk and parse it to retrieve the
        deserialized graph_def.
        Arguments -
            model_path      : A string holding the path of the TensorFlow model (.pb).
        Returns -
            detection_graph : A tf.Graph with the deserialized graph_def imported,
                              holding the network architecture.
        '''
        detection_graph = Graph()
        with detection_graph.as_default():
            od_graph_def = GraphDef()
            with gfile.GFile(model_path, 'rb') as fid:
                serialized_graph = fid.read()
                od_graph_def.ParseFromString(serialized_graph)
                import_graph_def(od_graph_def, name='')
        return detection_graph
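A hedged usage sketch; the tensor names below are the conventional ones for TensorFlow object-detection frozen graphs, not guaranteed by this snippet:

graph = detector.load_frozen_graph("frozen_inference_graph.pb")  # detector: owning instance
with tf.Session(graph=graph) as sess:
    boxes, scores = sess.run(
        ["detection_boxes:0", "detection_scores:0"],
        feed_dict={"image_tensor:0": image_batch})  # image_batch: uint8 NHWC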
Example #5
from os.path import join as path_join

from tensorflow import GraphDef
from tensorflow.gfile import FastGFile
from tensorflow.python.tools.optimize_for_inference_lib import optimize_for_inference


def optimize_frozen_graph(logdir: str, frozen_graph: str):
    with FastGFile(path_join(logdir, frozen_graph),
                   mode='rb') as frozen_graph_file:
        frozen_graph_def = GraphDef()
        frozen_graph_def.ParseFromString(frozen_graph_file.read())
        # Input node "Reshape" and output node "softmax" are baked in here
        # and must match the model being optimized.
        optimized_frozen_graph_def = optimize_for_inference(
            frozen_graph_def, ["Reshape"], ["softmax"], [])
        optimized_frozen_graph_as_bytes = (
            optimized_frozen_graph_def.SerializeToString())

    optimized_frozen_graph = frozen_graph.replace("_frozen",
                                                  "_frozen_optimized")
    optimized_frozen_graph_path = path_join(logdir, optimized_frozen_graph)
    with open(optimized_frozen_graph_path,
              mode='wb') as optimized_frozen_graph_file:
        optimized_frozen_graph_file.write(optimized_frozen_graph_as_bytes)

    return optimized_frozen_graph_path
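A hedged usage sketch. Two constraints are baked into the function above: the filename must contain "_frozen" for the output rename to take effect, and the node names ("Reshape", "softmax") must match your model.

optimized_path = optimize_frozen_graph("/tmp/logdir", "mnist_frozen.pb")  # hypothetical paths
print(optimized_path)  # /tmp/logdir/mnist_frozen_optimized.pb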
Example #6
    def Model_from_Kinetica(self, Model_ID):
        from tensorflow import GraphDef, Graph, import_graph_def
        h_db = self.h_db
        response = h_db.get_records(
            table_name='TFmodel',
            encoding="binary",
            options={'expression': 'model_id="{}"'.format(Model_ID)})
        records = gpudb.GPUdbRecord.decode_binary_data(
            response["type_schema"], response["records_binary"])
        record = records[0]

        # Deserialize the stored protobuf bytes into a GraphDef.
        graph_def = GraphDef()
        graph_def.ParseFromString(record["model_binary"])

        graph = Graph()
        with graph.as_default():
            # Pass name="" to avoid the default "import/" prefix on every op;
            # everything is loaded into a fresh graph, so no prefix is needed.
            import_graph_def(graph_def, name="")
        return graph
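A hedged usage sketch; the model ID and tensor names below are assumptions:

import tensorflow as tf  # TF 1.x APIs assumed

graph = client.Model_from_Kinetica("resnet50_v1")  # client: owning instance
with tf.Session(graph=graph) as sess:
    logits = sess.run("logits:0", feed_dict={"input:0": image_batch})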
Example #7
    def __init__(
            self,
            g: tf.GraphDef = None,
            collections: Iterable[tf.MetaGraphDef.CollectionDefEntry] = None):
        """
    Wrap a tf.GraphDef protocol buffer in a Graph object.

    Args:
      g: a tf.Graph or tf.GraphDef protobuf that represents a
        TensorFlow graph. If set to None, generate an empty
        tf.GraphDef
      collections: Optional iterable of tf.MetaGraphDef.CollectionDefEntry 
        objects containing information about collections in the graph.
        Note that this constructor will pull collection info out of `g` if
        it is a `tf.Graph` and `collections` is `None`.
    """
        if g is None:
            graph_def = tf.GraphDef()
        elif isinstance(g, tf.GraphDef):
            graph_def = g
        elif isinstance(g, tf.Graph):
            graph_def = g.as_graph_def()
            if collections is None:
                collections = _make_collection_defs(g)
        else:
            raise TypeError(
                "Graph is of type {}. Expected a tf.Graph or GraphDef "
                "proto".format(type(g)))
        self._version = 0  # Must happen first; other init code needs self._version
        self._frozen = False
        self._graph_def = graph_def
        self._next_id = 1
        output_map = _decode_graph(graph_def)
        self._node_name_to_node = {}  # Dict[str, node.Node]; key is node name
        self._node_to_frame_names = None
        self._frame_name_to_nodes = None
        self._head_name_to_coloc_group = None  # Dict[str, FrozenList[str]]
        self._variable_name_to_variable = {}  # Dict[str, Variable]

        # Load nodes in three passes because the graph may contain cycles.
        for node_def in graph_def.node:
            self.add_node_from_node_def(node_def, set_inputs=False)
        for node_def in graph_def.node:
            self[node_def.name].set_outputs_from_pairs(
                output_map[node_def.name])
        for node_def in graph_def.node:
            self[node_def.name].set_inputs_from_strings(
                node_def.input, set_control_inputs=True)

        self._collections = {}
        if collections is not None:
            for c in collections:
                self.add_collection_from_collection_def(c)
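A hedged usage sketch, assuming this is the constructor of a Graph wrapper class (as in graph_def_editor-style projects): given a live tf.Graph, collection info is pulled out automatically.

import tensorflow as tf  # TF 1.x APIs assumed

tf_graph = tf.Graph()
with tf_graph.as_default():
    x = tf.placeholder(tf.float32, shape=[None, 4], name="x")
    tf.identity(x, name="y")

g = Graph(tf_graph)  # class name assumed from context
node = g["y"]        # nodes are addressable by name, per the passes above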
Example #8
def strip_consts(graph_def, max_const_size=32):
    """Strip large constant values from graph_def."""
    strip_def = GraphDef()
    for n0 in graph_def.node:
        n = strip_def.node.add()
        n.MergeFrom(n0)
        if n.op == 'Const':
            tensor = n.attr['value'].tensor
            size = len(tensor.tensor_content)
            if size > max_const_size:
                # tensor_content is a bytes field, so encode the placeholder.
                tensor.tensor_content = ("<stripped %d bytes>" % size).encode()
    return strip_def
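A quick TF 1.x demonstration of the stripping (this helper is usually paired with a notebook graph visualizer):

import numpy as np
import tensorflow as tf

with tf.Graph().as_default() as g:
    tf.constant(np.arange(10000, dtype=np.float32), name="big_const")

small_def = strip_consts(g.as_graph_def(), max_const_size=32)
# The Const node now carries a "<stripped 40000 bytes>" placeholder
# instead of the 40 KB payload.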
Example #9
from tensorflow import GraphDef, Graph, Session, import_graph_def
from tensorflow.gfile import GFile
from tensorflow.python.saved_model import builder as saved_model_builder
from tensorflow.python.saved_model import tag_constants
from tensorflow.python.saved_model.signature_def_utils import predict_signature_def


def main(args):
    with GFile(args.frozen_model_path, "rb") as f:
        graph_def = GraphDef()
        graph_def.ParseFromString(f.read())

    # Import the graph_def into a fresh Graph first, then open the Session
    # on that graph so the SavedModelBuilder captures the imported ops.
    with Graph().as_default() as graph:
        import_graph_def(graph_def, name='')
        signature = predict_signature_def(
            inputs={'image_batch': graph.get_tensor_by_name('image_batch:0'),
                    'phase_train': graph.get_tensor_by_name('phase_train:0')},
            outputs={'embeddings': graph.get_tensor_by_name('embeddings:0')}
        )

        with Session(graph=graph) as sess:
            builder = saved_model_builder.SavedModelBuilder(args.output_model_dir)
            builder.add_meta_graph_and_variables(
                sess=sess,
                tags=[tag_constants.SERVING],
                signature_def_map={'serving_default': signature}
            )
            builder.save()
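A hedged smoke test for the exported model, using the TF 1.x SavedModel loader API (tensor name as in the signature above):

import tensorflow as tf

with tf.Session(graph=tf.Graph()) as sess:
    tf.saved_model.loader.load(
        sess, [tf.saved_model.tag_constants.SERVING], args.output_model_dir)
    embeddings = sess.graph.get_tensor_by_name('embeddings:0')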
Example #10
    def __init__(self, name, batch_size=64):
        self._graph = tf.Graph()

        with self._graph.as_default():
            if isinstance(name, STRING_TYPES) and '/' not in name:
                model = load_model(name)
            elif not os.path.exists(name):
                raise Exception(
                    "Argument is neither a valid module name nor a path to an existing file/folder: {}"
                    .format(name))
            else:
                if not os.path.isdir(name):
                    with open(name, 'rb') as f:
                        model = f.read()
                else:
                    model = {}
                    with open(os.path.join(name, 'meta.json')) as f:
                        model['meta'] = json.load(f)
                    with open(os.path.join(name, 'model.pb'), 'rb') as f:
                        model['model'] = f.read()
                    with codecs.open(os.path.join(name, 'vocab.txt'),
                                     encoding='utf-8') as f:
                        model['vocab'] = f.read()

            if isinstance(model, dict):
                graph_def = GraphDef.FromString(model['model'])
            else:
                graph_def = GraphDef.FromString(model)
            tf.import_graph_def(graph_def, name='')

        self._sess = Session(graph=self._graph)
        if not isinstance(model, dict):
            # Older model format (for ELMo-based models)
            self._chars = self._graph.get_tensor_by_name('chars:0')
            self._charts = self._graph.get_tensor_by_name('charts:0')
            self._label_vocab = LABEL_VOCAB
            self._language_code = 'en'
            self._provides_tags = False
            self._make_feed_dict = self._make_feed_dict_elmo
        else:
            # Newer model format (for BERT-based models)
            meta = model['meta']
            # Label vocab is made immutable because it is potentially exposed to
            # users through the spacy plugin
            self._label_vocab = tuple(
                [tuple(label) for label in meta['label_vocab']])
            self._language_code = meta['language_code']
            self._provides_tags = meta['provides_tags']

            self._input_ids = self._graph.get_tensor_by_name('input_ids:0')
            self._word_end_mask = self._graph.get_tensor_by_name(
                'word_end_mask:0')
            self._charts = self._graph.get_tensor_by_name('charts:0')
            if self._provides_tags:
                self._tag_vocab = meta['tag_vocab']
                self._tags = self._graph.get_tensor_by_name('tags:0')

            self._bert_tokenizer = BertTokenizer(
                model['vocab'], do_lower_case=meta['bert_do_lower_case'])
            self._make_feed_dict = self._make_feed_dict_bert

        self.batch_size = batch_size
Example #11
                        help='Compiler input node names')
    parser.add_argument('--c_output_nodes',
                        default=None,
                        help='Compiler output node names')
    args = dict2attr(parser.parse_args())

    if not args.pre_process:
        raise ValueError(
            'Please provide a --pre_process input. Valid options: resnet50, '
            'inception_v1, inception_v3, inception_v4, squeezenet')

    if args.quantize:
        from tensorflow import GraphDef
        from tensorflow.contrib import decent_q

        input_graph_def = GraphDef()
        with open(args.model, "rb") as f:
            input_graph_def.ParseFromString(f.read())

        if os.path.isdir(args.output_dir):
            print('Cleaning model artifacts in {}'.format(
                os.path.abspath(args.output_dir)))
            files_to_clean = [
                os.path.join(os.path.abspath(args.output_dir), f)
                for f in os.listdir(args.output_dir)
            ]
            for f in files_to_clean:
                if os.path.isfile(f):
                    os.remove(f)
                elif os.path.isdir(f):
                    rmtree(f)  # shutil.rmtree, assumed imported earlier